diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 869306e3e3f..ec680f39ca0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,10 +5,10 @@ jobs: name: Test runs-on: ubuntu-latest container: - image: python:3.11-bullseye + image: python:3.12-bullseye env: node-version: '20' - mailroom-version: '9.3.3' + mailroom-version: '9.3.59' services: redis: @@ -62,6 +62,11 @@ jobs: ./mailroom -db=postgres://temba:temba@postgres:5432/temba?sslmode=disable -redis=redis://redis:6379/15 -log-level=info > mailroom.log & working-directory: ${{ github.workspace }} + - name: Install and start DynamoDB + uses: rrainn/dynamodb-action@v4.0.0 + with: + port: 6000 + - name: Initialize environment run: | npm install -g yarn less @@ -69,6 +74,7 @@ jobs: yarn install ln -s settings.py.dev temba/settings.py poetry run python manage.py migrate + poetry run python manage.py migrate_dynamo --testing working-directory: ${{ github.workspace }} # https://github.com/actions/runner/issues/2058#issuecomment-2085119510 - name: Run pre-test checks diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f85369474d..c018d37be7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10170 +1,10789 @@ -v9.3.21 (2024-08-19) +v9.3.139 (2024-12-16) ------------------------- - * Use correct URL when breaking spa-container - * Delete API tokens when user deleted and use generate_secret to create new tokens - * Update API token management UI to support multiple tokens + * Remove no longer used waiting_exits from flow inspection -v9.3.20 (2024-08-14) +v9.3.138 (2024-12-16) ------------------------- - * Rework S3 code to always use real S3 clients, even in tests + * Read only servicing for channels claim + * Improve servicing banner -v9.3.19 (2024-08-14) +v9.3.137 (2024-12-12) ------------------------- - * Fix DTOne formax section - * Change default settings to use minio for file storage + * Prevent importing of flows with a spec version that is ahead of the engine -v9.3.18 (2024-08-13) +v9.3.136 (2024-12-11) ------------------------- - * Record when API tokens were last used - * Only support import contacts using .xlsx files with openpyxl + * Remove gauges from analytics abstraction + * Move cron utils to their own package + * Stop writing and squashing old message counts + * Read from new message counts -v9.3.17 (2024-08-12) +v9.3.135 (2024-12-11) ------------------------- - * Data migration to delete old surveyor and prometheus API tokens + * Data migration to backfill new message folder counts -v9.3.16 (2024-08-08) +v9.3.134 (2024-12-10) ------------------------- - * Stop generating prometheus API tokens - * Drop Ticket.body + * Start writing new message folder counts -v9.3.15 (2024-08-08) +v9.3.133 (2024-12-10) ------------------------- - * Add Org.prometheus_token and backill from API tokens + * Bump to flow spec 13.6.1 + * Revert removal of input/text columns from results exports -v9.3.14 (2024-08-08) +v9.3.132 (2024-12-09) ------------------------- - * Update tests to not set ticket body - * Add data migration to move body to ticket on open ticket event + * Update to latest Django 5.1 + * Allow servicing users to preview starts -v9.3.13 (2024-08-08) +v9.3.131 (2024-12-09) ------------------------- - * Show notes on ticket open events in contact history - * Remove body from ticket endpoint documentation - * Update floweditor which now also refers to ticket body as note - * Update open ticket modal to use note instead of body - * Add cutoff date for using viewer role + * Allow servicing users to claim 
channels + * Use statement level db triggers to maintain group counts -v9.3.12 (2024-08-07) +v9.3.130 (2024-12-07) ------------------------- - * Don't create surveyor user in mailroom test db - * Add warning to manage accounts page if org has viewers - * Remove viewers as an org feature, only allow existing viewer users to remain as viewers - * Update to latest Django + * Allow service exports and inspection of broadcasts -v9.3.11 (2024-08-07) +v9.3.129 (2024-12-04) ------------------------- - * Remove Org.surveyor_password and always disable creating surveyor flows - * Remove non-modal response support from export translation view - * Remove surveyor user role and test user + * Remove input field on results returned from runs endpoint + * Use generic squash query where possible for squashable counts + * Update db triggers to look at run.path_nodes instead of run.path -v9.3.10 (2024-08-07) +v9.3.128 (2024-12-03) ------------------------- - * Remove surveyor users from workspaces + * Bump to flow spec version 13.6 -v9.3.9 (2024-08-07) +v9.3.127 (2024-12-03) ------------------------- - * Fix incidents templates name - * Let Ticket.body be null and make note length match contact note length + * Add new fields to flow runs to store path data in more condensed format + * Remove input/text columns from results exports -v9.3.8 (2024-08-06) +v9.3.126 (2024-12-03) ------------------------- - * Show tabs on tickets when contact is set + * Update to latest editor + * Remove antialiasing from general fonts + * Cleanup fetching of category counts for results page analytics tab + * Fix segment count backfill migration to include non-waits + * Stop writing and squashing old path counts -v9.3.7 (2024-08-06) +v9.3.125 (2024-11-28) ------------------------- - * Add contact notes ui + * Remove TWT channel type + * Prefetch run status counts used by flows list view and flows API endpoint + * Update twilio python client library + * Read segment counts from new flow activity count model -v9.3.6 (2024-08-06) +v9.3.124 (2024-11-28) ------------------------- - * Adjust the grant view for new UI - * Fix Android claim page - * Add incident for Android client app version out of date - * Tweak fail_old_messages to only fail Android messages and add an index + * Data migration to backfill new segment counts + * Update vonage python client + * Stop writing and squashing old node and status counts -v9.3.5 (2024-07-31) +v9.3.123 (2024-11-28) ------------------------- - * Support FCM changes - * Require E164 phone numbers for contacts created from UI + * Only load flow results once per page load -v9.3.4 (2024-07-30) +v9.3.122 (2024-11-27) ------------------------- - * Add contact notes and expose over contacts API endpoint + * Stop reading node and status counts from old models -v9.3.3 (2024-07-29) +v9.3.121 (2024-11-27) ------------------------- - * Clamp messages on message views to one line - * Adjust max length for AT API key - * Make 'New Field' a button + * Data migration to backfill node and status counts into new model -v9.3.2 (2024-07-29) +v9.3.120 (2024-11-27) ------------------------- - * Allow deleting of empty ticket topics - * Add support for buttons in side menu and use where appropriate + * Change new count model squashing to not insert zero sums + * Start writing run status/node counts to new count model -v9.3.0 (2024-07-25) +v9.3.119 (2024-11-26) ------------------------- - * Add User.get_by_email to ensure consistent behaviour where we look up a user by their email - * Omnibox fixes and cleanup + * Fix
selecting of menu when going to group page + * Fix some JS errors on flow result pages + * Add team field to users API endpoint and allow filtering by multiple emails + * Stop validating flow revisions when fetching revision list + * Tweak axis scaling of engagement timeline chart + * Read engagement counts from new count model -v9.2.5 (2024-07-24) +v9.3.118 (2024-11-26) ------------------------- - * Ensure that emails are consistently treated as case insensitive + * Add URL param (new=1) to read engagement chart data from new count model -v9.2.4 (2024-07-23) +v9.3.117 (2024-11-26) ------------------------- - * Simplify FCM config setting names + * Update smartmin and start cleaning up old pjax/formax code + * Show hour of day chart in org timezone -v9.2.3 (2024-07-23) +v9.3.116 (2024-11-25) ------------------------- - * More updates to WhatsApp claiming + * Improve and simplify flow engagement charts + * Allow releasing FBA channels even if we cannot reach the API to deactivate new conversation triggers + * Move active flow bar into chat component + * Add email filtering for api users endpoint + * Fix safari font rendering -v9.2.2 (2024-07-23) +v9.3.115 (2024-11-22) ------------------------- - * Fix WhatsApp embedded signup + * Tweak migration to allow offline use -v9.2.1 (2024-07-18) +v9.3.114 (2024-11-22) ------------------------- - * Catch errors from xlrd reading import rows and return errors with row numbers - * Update xlrd - * Honor meta key keyboard press inside contact chat + * Data migration to backfill new flow engagement counts -v9.2.0 (2024-07-17) +v9.3.113 (2024-11-22) ------------------------- - * Simplify permissions in flows app - * Tweak menu items for msg views and flow results + * Use debug logging for channel types errors, D3 and WA + * Don't send notifications to system users + * Split up flow count squashing tasks so we can track performance separately + * Start tracking incoming message counts by flow -v9.1.198 (2024-07-17) +v9.3.112 (2024-11-21) ------------------------- - * Allow template image variables to be text with expressions + * Update editor + * Hide option to delete runs for viewers + * Don't rewind path counts for deleted runs + * Add new FlowActivityCount model + * Use debug logging for channel types errors + * Limit servicing staff users to API GETs -v9.1.196 (2024-07-16) +v9.3.111 (2024-11-20) ------------------------- - * Add __repr__ to more models and tweak existing ones for consistency - * Fix rendering of flow starts for deleted flows - * Add data migration to trim old broadcasts to nodes that resulted in very large contact lists + * Allow using the API explorer without creating an API token + * Use temba.orgs.models.User consistently -v9.1.195 (2024-07-16) +v9.3.110 (2024-11-19) ------------------------- - * Remove special error handling for broadcast to node that resolves to no recipients - * Fix setting a template on a new broadcast - * Fix query broadcast creation and update - * Add rendering of exclusions on broadcasts - * Fix not showing query on broadcast recipients list and add node_uuid + * Prevent workspace admins from modifying the system user on a workspace + * Cleanup group update view + * Show on read page for a smart group if it's being populated.
+ * Disable async activation for channel types not showing config page + * Add is_system to user settings -v9.1.194 (2024-07-15) +v9.3.109 (2024-11-18) ------------------------- - * Add Broadcast.node_uuid field - * Remove old code for getting message created_by from broadcasts - * Make some exception clauses more specific + * Add support for TST type testing channels -v9.1.193 (2024-07-15) +v9.3.108 (2024-11-18) ------------------------- - * Replace TemplateTranslation.STATUS_UNSUPPORTED completely + * Add Org.suspended_on to track when an org was suspended + * Rework org perms again -v9.1.192 (2024-07-15) +v9.3.107 (2024-11-15) ------------------------- - * Add new template statuses and stop using fake "unsupported" status + * Fix deleting orgs with invitations to teams + * More view refactoring -v9.1.191 (2024-07-15) +v9.3.106 (2024-11-13) ------------------------- - * Fix deactivating a legacy WhatsApp channel - * Update format of templates on API endpoint - * Show template translation problems as errors on template read page + * Show on user list pages whether user has 2FA enabled + * Change confirm access view to logout on failed attempts limit + * Move login view functionality from smartmin and remove no longer needed redirect -v9.1.190 (2024-07-12) +v9.3.105 (2024-11-13) ------------------------- - * Fix padding for broadcast schedule update + * Allow staff to create invitations to workspaces -v9.1.189 (2024-07-12) +v9.3.104 (2024-11-12) ------------------------- - * Fix mailroom_db - * Data migration to populate TemplateTranslation.is_supported and is_compatible + * Revert change to disallow API posts from staff users -v9.1.188 (2024-07-12) +v9.3.103 (2024-11-12) ------------------------- - * Add new boolean fields to TemplateTranslation model to determine whether it's usable + * Only allow GET requests to API by servicing staff members + * Allow requests to break out of spa on demand -v9.1.187 (2024-07-12) +v9.3.102 (2024-11-12) ------------------------- - * Add templates to broadcasts + * Make sure agents can still edit accounts + * Reorganize staff menu -v9.1.186 (2024-07-11) +v9.3.101 (2024-11-11) ------------------------- - * Fix handling of POSTs to API docs - * Exclude empty templates from list, and show base translation apart on read page - * Ensure we choose a new base for a template whenever an existing base translation is deleted + * Fix issues with inconsistent header names + * Add our own simple logout view + * Ending servicing of an org should redirect to org list page + * Stop exposing the Smartmin UserCRUDL -v9.1.185 (2024-07-11) +v9.3.100 (2024-11-11) ------------------------- - * Update deps - * Replace telegram library by requests use - * Fix dashboard menu link permission - * Expose Template.base_translation on API endpoint + * Show error dialog for http errors in more places + * Remove old scheduled bcast read view and legacy paginator template + * Create our own view for user failed login + * Don't include smartmin.csv_imports urls and move relayer urls to channels app + * Tweak appearance of login page + * Don't show org suspended warning on any no-nav pages + * Fix styling on join-accept page + * Don't notify new admins that they joined a workspace + * Fix fetching tickets as servicing staff user -v9.1.184 (2024-07-11) +v9.3.99 (2024-11-08) ------------------------- - * Use dropdowns for location fields + * Fix resolving ticket topics for servicing staff users -v9.1.183 (2024-07-11) +v9.3.98 (2024-11-08) ------------------------- - * Use dropdowns for 
location fields + * Add readonly_servicing to OrgPermsMixin -v9.1.182 (2024-07-10) +v9.3.97 (2024-11-08) ------------------------- - * Locations API endpoint should allow searching on the path - * Fix template syncing when channel gives us invalid template data + * Handle ajax response redirects + * Only include temba org if it is set -v9.1.181 (2024-07-10) +v9.3.96 (2024-11-07) ------------------------- - * Add Template.base_translation - * Fix dashboard workspace data - * Allow creation of contacts with non-active statuses + * Fix org obj perms mixin for staff users + * Omit temba-org header in some cases + * Fix org start view and org_perms context processor for servicing staff users + * Remove no longer used partial template view -v9.1.180 (2024-07-10) +v9.3.95 (2024-11-07) ------------------------- - * Drop no longer used is_active field from TemplateTranslation - * Tweak wording on template list page - * Add db constraint to ensure contact status is valid + * Only allow GET requests by servicing staff users + * Send temba-org header from components + * More obvious account servicing + * Restrict staff servicing org perms to non-POST requests + * OrgMiddleware should prevent cross-org POSTs -v9.1.179 (2024-07-10) +v9.3.94 (2024-11-07) ------------------------- - * Keep FCM ID in channel config when soft deleting the channel - * Stop using TemplateTranslation.is_active and make nullable + * Allow updating agent team from user list page + * User and invitation list views should show team for agent users if that feature is enabled + * Allow creating invitations with teams + * Remove experimental mailgun channel type + * Fix displaying of exports based on status "groups" -v9.1.178 (2024-07-09) +v9.3.93 (2024-11-05) ------------------------- - * Allow broadcast creation with zero matches + * Allow invitations to specify team and block team deletion when it has pending invitations + * Drop no longer used count models -v9.1.177 (2024-07-08) +v9.3.92 (2024-11-05) ------------------------- - * Hard delete remaining soft-deleted template translations + * Remove database triggers to maintain old notification counts -v9.1.176 (2024-07-08) +v9.3.91 (2024-11-05) ------------------------- - * Update Template to a TembaModel - * Hard delete template translations that no longer exist on the channel side + * Update some deps + * Start using new notification counts + * Add data migration to backfill new notification counts -v9.1.175 (2024-07-05) +v9.3.90 (2024-11-05) ------------------------- - * Make send_when optional when updating broadcasts + * Start writing notification counts to orgs_itemcount -v9.1.174 (2024-07-05) +v9.3.89 (2024-11-05) ------------------------- - * Fix updating scheduled broadcasts - * Remove old unused code for queueing broadcasts + * Fix calculating field usages on API endpoint + * Stop writing old ticket counts -v9.1.173 (2024-07-05) +v9.3.88 (2024-10-31) ------------------------- - * Add Msg.is_android field - * Add internal API endpoint for searching locations by level and name - * Remove option to send now on broadcast update + * Fix browsing definitions API endpoint docs + * Fix the My Tickets icon, wasn't always accurate + * Prevent deletion of non-empty teams + * Start reading from new ticket counts -v9.1.172 (2024-07-04) +v9.3.87 (2024-10-31) ------------------------- - * Add templates to broadcasts (hidden for now) - * Remove deprecated broadcast.template_state field on mailroom queue payload + * Make shortcuts an optional attribute on compose -v9.1.171 (2024-07-03) 
+v9.3.86 (2024-10-31) ------------------------- - * Update payload for queueing a bradocast + * Replace custom chunk_list with new itertools.batched + * Fetch logs from DynamoDB in batches of 100 + * Data migration to backfill item counts for tickets -v9.1.170 (2024-07-03) +v9.3.85 (2024-10-30) ------------------------- - * Remove no longer needed task to sync stale Android relayers - * Don't allow template localization - * Update dependencies + * Add generic squashable count model for things owned by orgs + * Implement ticket counts by topic and assignee using new count model + * Ensure that ticket counts are cleaned up when a topic is deleted -v9.1.169 (2024-07-02) +v9.3.84 (2024-10-30) ------------------------- - * Use python 3.11.x - * Add Broadcast.template_variables - * Add new template list and read pages and remove old channel specific ones - * Fix globals list template + * Reduce topic limit to 50 and enforce limits for topics and teams + * Implement filtering of tickets by accessible topics -v9.1.168 (2024-06-28) -------------------------- - * Don't sync classifiers in suspended orgs - * Fix empty contact search with query present +## v9.3.83 (2024-10-30) -v9.1.167 (2024-06-28) -------------------------- - * Disallow empty recipient targeting - * Fix external links within spa container +- Show same featured + proxy fields on the group pages +- Fix scrolling for contact group pages +- Add query checks to ticket view tests and fix missing prefetches -v9.1.166 (2024-06-27) -------------------------- - * Tweak logging for failure during classifier syncing - * Switch broadcast tests to use contact search +## v9.3.82 (2024-10-29) -v9.1.165 (2024-06-27) -------------------------- - * Rework remaining mailroom client methods - * Add unique constraint on template translations +- Make teams an org feature..
that nobody has for now +- Some cleanup to topic crudl and ticket folders +- Tweak name/url of contact group filter list page +- Filter topics in topic selection menu based on team membership +- Add basic team CRUDL views -v9.1.164 (2024-06-27) -------------------------- - * Add data migration to remove duplicate template translations +## v9.3.81 (2024-10-28) -v9.1.163 (2024-06-27) -------------------------- - * Change template translation syncing to enforce uniqueness over channel+locale +- Fix N+1 query on contact list page +- Cleanup more list pages and move more functionality to org/base views -v9.1.162 (2024-06-27) -------------------------- - * Make templatetranslation locale non-null - * Add migration to release translations for released channels +## v9.3.80 (2024-10-24) -v9.1.161 (2024-06-27) -------------------------- - * Fix not releasing template translations when channel released +- Add migration to assign teamless agents to the default team +- Prevent deletion of system teams +- Assign new agent users to the default team if team not specified +- Data migration to give existing orgs a default team -v9.1.160 (2024-06-27) -------------------------- - * Fix creating scheduled broadcasts - * Tweak menu on campaign read page - * Update to latest smartmin +## v9.3.79 (2024-10-24) -v9.1.159 (2024-06-26) -------------------------- - * Simplify some button labels and make edit a button on contact read page - * Don't show empty contact filter list - * Rework more mailroom client methods to use models instead of primitives +- Add max length of 10,000 to shortcut text +- Give every workspace a default team with access to all topics +- Change delete links on list views to be clearer -v9.1.158 (2024-06-26) -------------------------- - * Add day selection when doing flow start search - * Tweak mailroom_db to run on different port +## v9.3.78 (2024-10-23) -v9.1.157 (2024-06-25) -------------------------- - * Reorg of mailroom client - * Add Broadcast.exclusions +- Use django filter to format archive size +- Fix paging on archive list pages and make styling consistent with other list views +- Add Team.all_topics to more easily model a team that can access all topics +- Remove styles from contact field list page that are no longer used since it became a placeholder for the field management component +- Convert API tokens page to be a real list page +- Make some list pages use a common template -v9.1.156 (2024-06-24) -------------------------- - * Change broadcast creation from UI to use mailroom +## v9.3.77 (2024-10-22) -v9.1.155 (2024-06-24) -------------------------- - * Fix WAC to addEventListener in OnSpload - * Fix horizontal scrolling for contacts list - * Add Broadcast.template +- Update django +- Update to python 3.12 +- Simplify bulk labeling of msgs and flows +- Remove unused code from MsgCRUDL.Menu and add test -v9.1.154 (2024-06-21) -------------------------- - * Fix z-index issue properly +## v9.3.76 (2024-10-18) -v9.1.153 (2024-06-21) -------------------------- - * Fix z-index issue with content menu and chat +- Fix agents shortcuts permission -v9.1.152 (2024-06-21) -------------------------- - * Fix ticket switching bug +## v9.3.75 (2024-10-17) -v9.1.151 (2024-06-21) -------------------------- - * Update chat rendering +- Add Shortcuts UI +- Normal menu navigation for tickets -v9.1.148 (2024-06-20) -------------------------- - * Fix Broadcast.create +## v9.3.74 (2024-10-17) -v9.1.147 (2024-06-20) -------------------------- - * Use mailroom to create broadcasts from API calls - *
Use mailroom to send broadcasts to flow nodes +- Remove pre-spa days code from flow list view +- Add more clarifications to FreshChat claim page +- Cleanup channel claim pages with steps -v9.1.146 (2024-06-17) -------------------------- - * Don't clip footer when ticket history grows - * Fix migration to add uuid field to airtime transfers +## v9.3.73 (2024-10-17) -v9.1.145 (2024-06-17) -------------------------- - * Don't send forgot password email if one was sent in last 5 minutes - * Delete failed login records on successful password reset - * Make transer UUID unique field, use TembaUUIDMixin on model +- Overhaul UI for managing child workspaces -v9.1.144 (2024-06-14) -------------------------- - * Add pagination on channel templates page - * Add settings config for Android clients FCM config - * Remove pyfcm and use google auth library to send sync messages for FCM - * Create our own password recovery view +## v9.3.72 (2024-10-17) -v9.1.143 (2024-06-12) -------------------------- - * Update smartmin - * Delete recovery tokens when new ones are created or email changed - * Populate airtime transfer uuids +- Move org service view to staff app +- Drop Invitation.user_group and UserSettings.team -v9.1.142 (2024-06-12) -------------------------- - * Add AirtimeTransfer.external_id - * Add data migration to cleanup template translations +## v9.3.71 (2024-10-17) -v9.1.141 (2024-06-12) -------------------------- - * Update to latest smartmin - * Add uuid field to airtime transfer model +- Fix invitations count on org menu to exclude expired invitations -v9.1.140 (2024-06-12) -------------------------- - * Really actually fix template attachments for real +## v9.3.70 (2024-10-16) -v9.1.139 (2024-06-11) -------------------------- - * Fix split issue for template editor +- Data migration to set Invitation.role_code -v9.1.138 (2024-06-10) -------------------------- - * Template editor fix for empty content - * Tweak component types to be header/*, body/* etc - * Support Twilio media in templates +## v9.3.69 (2024-10-16) -v9.1.137 (2024-06-10) -------------------------- - * Support WhatsApp templates with header images - * Remove no longer used URN related code - * Generate email verification secret when account created, change when email changed +- Fix how we model team membership so that users can belong to different teams in different workspaces -v9.1.136 (2024-06-07) -------------------------- - * Add spa mixin to transfer logs views - * Allow editing TWA messaging service SID - * Lean on mailroom for URN validation during contact update - * Some tidy up of the update contact form +## v9.3.68 (2024-10-16) -v9.1.135 (2024-06-05) -------------------------- - * Fix login error message styling - * Remove unused JS libs +- Tweak user update and delete forms to return 404 for users not in the current org -v9.1.134 (2024-06-05) -------------------------- - * Contact API endpoint should let mailroom decide if a URN is taken - * Revert "Remove csrf token hidden element not under a form" +## v9.3.67 (2024-10-16) -v9.1.133 (2024-06-05) -------------------------- - * Fix API explorer POSTs - * Make CSRF cookie age 2 weeks and remove non-form hidden CSRF hidden elements +- New CRUDL views for org users and invitations -v9.1.132 (2024-06-04) -------------------------- - * Make sure the CSRF element is present for all page header blocks +## v9.3.66 (2024-10-16) -v9.1.131 (2024-05-31) -------------------------- - * Fix DT One submit buttons +- Fix displaying the channel log missing HTTP response +- Fix claim 
number to display non field errors +- Remove support for user management of sub-orgs without switching to those orgs -v9.1.130 (2024-05-31) -------------------------- - * Fix flow and msgs unlabel action - * Remove no longer used params field on synched whatsapp type templates +## v9.3.65 (2024-10-10) -v9.1.129 (2024-05-29) -------------------------- - * Increase DATA_UPLOAD_MAX_NUMBER_FIELDS to 2500 - * Fix FB and IG claim getFBpages +- Add mixin for views that require a feature -v9.1.128 (2024-05-27) -------------------------- - * Lean on mailroom for validation of phone numbers from android events / messages +## v9.3.64 (2024-10-09) -v9.1.127 (2024-05-27) -------------------------- - * Rework contact create view to let mailroom do URN validation +- Fix modal for deleting a shortcut +- Tweak list view templates for consistency +- Data migration to tweak names of existing status groups -v9.1.126 (2024-05-24) -------------------------- - * Mailroom client should use content-type header on responses to know whether to parse as JSON - * Ensure anon users can access API docs +## v9.3.63 (2024-10-09) -v9.1.125 (2024-05-23) -------------------------- - * Add csrf on hidden element +- Create status groups with invalid names to avoid conflicts with real group names +- Bump django from 5.1 to 5.1.1 -v9.1.124 (2024-05-22) -------------------------- - * Rework handling of errors from mailroom client - * Update test db flows +## v9.3.62 (2024-10-08) -v9.1.123 (2024-05-20) -------------------------- - * Replace django messages rendering with toasts +- Fix double character rendering on autogrow inputs -v9.1.121 (2024-05-16) -------------------------- - * Fix action to remove from group. - * Report bulk action errors to users with django messages +## v9.3.61 (2024-10-08) -v9.1.120 (2024-05-16) -------------------------- - * Remove old unused ES sorting code - * Update to latest smartmin and disable auto success messages - * Add data migration to fix system fields for existing orgs and start using is_proxy - * Reduce reserved keys for fields to bare minimum +- Move staff only org and user views to new staff app -v9.1.119 (2024-05-16) -------------------------- - * Add ContactField.is_proxy and reduce SYSTEM_FIELDS to the two proxy date fields - * Don't use error level alerts for form errors +## v9.3.60 (2024-10-08) -v9.1.118 (2024-05-15) -------------------------- - * Remove unused args from MailroomClient.parse_query - * Re-add search errors to contact list views +- Improve invitation emails -v9.1.117 (2024-05-15) -------------------------- - * Add support for unknown_property_type search errors - * Add support for twilio card type content templates - * Add way to view webhook logs errors only +## v9.3.59 (2024-10-08) -v9.1.116 (2024-05-14) -------------------------- - * Fix issues with twilio templates sync +- Fix not creating invitation accepted notifications in case of new user signup -v9.1.115 (2024-05-10) -------------------------- - * Fix Twilio template type slug and register its template type +## v9.3.58 (2024-10-08) -v9.1.114 (2024-05-10) -------------------------- - * Add message templates menu for TWA channels - * Activate Twilio Whatsapp to sync templates with twilio type - * Update to allow matching sender ID as valid phones +- Use mailroom to trigger android channel sync +- Add new notification type for when an invitation to join a workspace is accepted +- More refactoring of modal views -v9.1.113 (2024-05-09) -------------------------- - * Fix gaps it contact history +## v9.3.57
(2024-10-04) -v9.1.112 (2024-05-09) -------------------------- - * Ignore android msg/event cmds with non numeric phones +- More view refactoring -v9.1.111 (2024-05-08) -------------------------- - * Send phone instead of urn to mailroom android endpoints - * Add Twilio content template type, and TWA fetch_templates +## v9.3.56 (2024-10-03) -v9.1.110 (2024-05-08) -------------------------- - * Remove messages block that duplicates alert-messages - * Tweak DefinitionExport.name for consistency +- Cleanup some view mixins -v9.1.109 (2024-05-07) -------------------------- - * Tweak export finished emails so they don't say Excel +## v9.3.55 (2024-10-03) -v9.1.108 (2024-05-07) -------------------------- - * Update temba-components to 0.86.1 - * Change flow definitions export to be async, use new export type +- Temporarily hide menu item for shortcuts +- Add pagination to flow starts and webhook logs pages +- Add internal API endpoint for fetching shortcuts +- Add model and CRUDL views for ticket shortcuts +- Fix topic create and update and tweak list pages for consistency -v9.1.107 (2024-05-07) -------------------------- - * Fix variable name in http log read page - * Fix claiming instagram +## v9.3.54 (2024-10-02) -v9.1.106 (2024-05-06) -------------------------- - * Fix globals API endpoint +- Adjust background flow start preview to include all contacts in other flows +- Make template sync use consistent component order to avoid breaking flow variables -v9.1.105 (2024-05-03) -------------------------- - * Fix race condition on editor load +## v9.3.53 (2024-10-01) -v9.1.104 (2024-05-03) -------------------------- - * Fix template bug and loading error for editor +- Fix location aliases to only update in one workspace -v9.1.103 (2024-05-02) -------------------------- - * Fix contact field selection +## v9.3.52 (2024-09-30) -v9.1.102 (2024-05-02) -------------------------- - * Delete all sessions and runs in org deletion in batches - * Tiny style change for loader wrapping on editor +- Add test_errors to mailroom client -v9.1.101 (2024-05-01) -------------------------- - * Update editor and flow spec version +## v9.3.51 (2024-09-27) -v9.1.100 (2024-04-29) -------------------------- - * Tweak time limit for sessions to 89 days so things are always interrupted before archiver gets to them - * Cleanup API endpoint docs +- Update components with progress bar tweaks -v9.1.99 (2024-04-26) -------------------------- - * Remove elastic search - * Add support for read msg status +## v9.3.50 (2024-09-27) -v9.1.98 (2024-04-25) -------------------------- - * Fix ticket status selection +- Add commas for broadcast message count -v9.1.97 (2024-04-25) -------------------------- - * Include url for org chooser +## v9.3.49 (2024-09-26) -v9.1.96 (2024-04-25) -------------------------- - * Remove jQuery +- Tweak deindexing a deleted contact -v9.1.95 (2024-04-25) -------------------------- - * Change ordering of non-search based exports to be id to match search based - * Use mailroom endpoint for search based contact exports - * Remove cancel button from contact import page and remove duplicate styles - * Tweak layout of user edit form - * Email notification that account email has changed should include the new email address +## v9.3.48 (2024-09-26) -v9.1.94 (2024-04-24) -------------------------- - * Fix changing password so user isn't logged out - * Fix user edit form allowing insecure passwords +- Use 10th anniversary rp logo +- Explicitly de-index contacts when released +- Request de-indexing of contacts when
hard deleting an org +- Switch to flowstart_list permission for status +- Add status and interrupt for broadcasts and starts -v9.1.93 (2024-04-24) -------------------------- - * Add notification types for when email or password is changed - * Expire unaccepted invitations after 30 days - * Move invitation form into modal +## v9.3.47 (2024-09-25) -v9.1.92 (2024-04-23) -------------------------- - * Remove start url for surveyors and instead do login redirect - * Fix to disallow content type vs extension mismatching for media uploads - * Fix to limit sending user verification email to 1 per 10 minutes - * Remove warning for flows that don't specify Facebook topic +- Re-introduce QUEUED status for FlowStarts and Broadcasts +- Remove progress field from flow starts endpoint docs -v9.1.91 (2024-04-18) -------------------------- - * Fix select race - * Fix header matching - * Simplify URL for template list page +## v9.3.46 (2024-09-23) -v9.1.90 (2024-04-16) -------------------------- - * Fix race on initial load for select and tabs +- Add progress field to broadcasts API endpoint +- Add Broadcast.interrupt(user) -v9.1.89 (2024-04-16) -------------------------- - * Fix API docs scrolling - * Fix mailroom_db data file - * Simplify channel claim page styling and remove unused styles - * Add Msg.templating +## v9.3.45 (2024-09-23) -v9.1.88 (2024-04-15) -------------------------- - * Drop FlowRun.submitted_by and cleanup superfulous constants - * Make whatsapp template type an actual package - * Simplify page titles so section isn't repeated in title +- Add PENDING/STARTED statuses and contact_count field to broadcasts -v9.1.87 (2024-04-12) -------------------------- - * Add inline attachment style and wrapping on logs - * Don't re-release released triggers +## v9.3.44 (2024-09-23) -v9.1.86 (2024-04-12) -------------------------- - * Prune unnecessary styles, move to heavier fonts +- Validate channel variable in the body for EX channels +- Replace broadcast status S with C -v9.1.85 (2024-04-12) -------------------------- - * Drop support for Submitted By in results exports - * Add constraint to limit Msg.DIRECTION to I or O - * Add constraint to incoming messages have channel and URN +## v9.3.43 (2024-09-19) -v9.1.83 (2024-04-11) -------------------------- - * Add TemplateType and rework whatsapp to be a type - * Remove special treatment for exports of surveyor flows - * Add TemplateTranslation.variables +- Add support for broadcast status (C) COMPLETED +- Remove broadcasts from Outbox now that they have their own page +- Put starts before webhooks on flow history menu -v9.1.82 (2024-04-10) -------------------------- - * Unpublicize the channel events API endpoint - * Drop unused Msg.queued_on field +## v9.3.42 (2024-09-18) -v9.1.81 (2024-04-10) -------------------------- - * Update temba-components +- Cleanup how we read and anonymize channel logs -v9.1.80 (2024-04-10) -------------------------- - * Assume js is pre-minified +## v9.3.41 (2024-09-18) -v9.1.79 (2024-04-09) -------------------------- - * Update flow editor +- Limit SetRunResult category length in editor +- Add --testing argument to migrate_dynamo command +- Start reading attached channel logs from DynamoDB instead of S3 -v9.1.78 (2024-04-09) -------------------------- - * Use new components bundle +## v9.3.40 (2024-09-17) -v9.1.77 (2024-04-09) -------------------------- - * Deprecate Msg.queued_on as it isn't used and make Msg.modified_on non-null +- Add INTERRUPTED as a status for flow starts +- Switch flow starting blocker to warning
-v9.1.76 (2024-04-08) -------------------------- - * Add data migration to backfill missing user settings - * Add signal receiver to ensure new users always have settings +## v9.3.39 (2024-09-17) -v9.1.75 (2024-04-04) -------------------------- - * Add data migration to archive campaigns with deleted groups - * Fix rendering of campaigns with deleted groups - * Improve styling on template list page +- Show bad import file error as validation errors to the user +- Fix flow start progress bar with high pcts +- Simplify outbox limit to be hardcoded at 1M +- Validate body for EX channel type will be valid JSON after replacing variables -v9.1.74 (2024-04-04) -------------------------- - * Update temba-components - * Use timedate formatting for last_seen_on / created_on on contact list pages - * Remove unused BRAND properties - * Cleanup displaying of channel name, address and type +## v9.3.38 (2024-09-14) -v9.1.73 (2024-04-03) -------------------------- - * Make Channel.name non-null and remove unused channel list view - * Replace format_datetime and short_datetime tags with day or datetime filters +- Add flow start progress bar -v9.1.72 (2024-04-03) -------------------------- - * Update temba-components - * Add data migration to backfill empty channel names - * Ensure Android channels get a default name when registering +## v9.3.37 (2024-09-13) -v9.1.71 (2024-04-03) -------------------------- - * Ignore empty messages from Android relayers +- Fix import read page title +- Fix importing contacts from spreadsheet with broken dimensions +- Fix TTL attribute name on DynamoDB channel logs table -v9.1.70 (2024-04-03) -------------------------- - * Update flow editor - * Remove unused option on assets endpoint to return environment +## v9.3.36 (2024-09-12) -v9.1.69 (2024-04-02) -------------------------- - * Remove no longer used template tag as_icon - * Fix export blocking due to multiple users exporting at same time - * Switch formax to expand vertically - * Add ChannelEvent.status field and prevent creating channel events of unknown types from Android syncs +- Use 'tasks:batch' queue name instead of 'batch' -v9.1.68 (2024-04-02) -------------------------- - * Use mailroom endpoints to create messages and events during Android syncing - * Drop support for returning template components as dict +## v9.3.35 (2024-09-12) -v9.1.67 (2024-04-01) -------------------------- - * Update template editor to work with comps as list - * Add task to trim old channel events +- Add progress field to flow starts endpoint -v9.1.66 (2024-03-28) -------------------------- - * Update format of tasks queued to mailroom +## v9.3.34 (2024-09-11) -v9.1.65 (2024-03-28) -------------------------- - * Update to django 5.0 and DRF 3.15.1 +- Add timing controls around flow starts -v9.1.64 (2024-03-25) -------------------------- - * Tweak menu styling +## v9.3.33 (2024-09-11) -v9.1.63 (2024-03-22) -------------------------- - * Add open tab event +- Rename dynamodb channel logs table -v9.1.62 (2024-03-22) -------------------------- - * Make workspace selection use common event pattern - * Truncate long template name to not break the page - * Replace iso630 with iso639-lang package - * Fix non Django 5 compatible code +## v9.3.32 (2024-09-07) -v9.1.61 (2024-03-21) -------------------------- - * Support for menu events +- Add outbox monitor for large queues -v9.1.60 (2024-03-21) -------------------------- - * Update to latest ruff, isort and djlint - * Drop TemplateTranslation.comps_as_dict - * Get rid of channel typed owned sync 
log views and use new channel view on HTTP log CRUDL - * Convert templates views to actual CRUDL and fix permissions +## v9.3.31 (2024-09-05) -v9.1.59 (2024-03-21) -------------------------- - * Move template code into templates app - * Stop writing TemplateTranslation.comps_as_dict +- Add an org limit for too many messages in outbox -v9.1.58 (2024-03-20) -------------------------- - * Some fixes for on-device mobile issues - * Allow returning of components in list format from API endpoint - * Update to latest black - * Don't try to extract parameters from template url button component display values +## v9.3.30 (2024-09-02) -v9.1.57 (2024-03-20) -------------------------- - * Add name field also to template components - * Tweak template list page to use components list instead of comps_as_dict +- Import cell data value instead of formulas using data_only flag to load the workbook -v9.1.56 (2024-03-19) -------------------------- - * Save TemplateTranslation.components as list, use comps_as_dict for API endpoint +## v9.3.29 (2024-08-27) -v9.1.55 (2024-03-19) -------------------------- - * Add temporary TemplateTranslation.comps_as_dict field +- Fix authorization code, verification, redirect URI -v9.1.54 (2024-03-19) -------------------------- - * Add type to template components - * Remove deprecated fields from template translations +## v9.3.28 (2024-08-27) -v9.1.53 (2024-03-18) -------------------------- - * Fix mobile notice +- Authorization code cannot be debugged +- Fix channel URLs to have a trailing slash +- Delete no longer used test flows +- Simplify functions for loading flows in tests and move flows used by legacy migration tests into their own directory +- TembaTest.create_flow should return a flow in latest version without migrating +- Only import real flows in tests where it's required +- Update README.md -v9.1.52 (2024-03-18) -------------------------- - * Don't migrate flows when listing campaign events +## v9.3.27 (2024-08-21) -v9.1.51 (2024-03-17) -------------------------- - * Tweaks to make the interface more mobile friendly +- Updates to migrate_dynamo command -v9.1.50 (2024-03-17) -------------------------- - * Better feedback when editing contact fields +## v9.3.26 (2024-08-21) -v9.1.49 (2024-03-15) -------------------------- - * Add url param type for buttons with URLs +- Add redirect for contact interrupt +- Create dynamo table with on-demand billing by default -v9.1.48 (2024-03-14) -------------------------- - * Show more components for WA templates list - * Add display to WA templates button components +## v9.3.25 (2024-08-21) -v9.1.47 (2024-03-14) -------------------------- - * Remove old templates API endpoint - * Update flow version for campaigns events single message flows +- Fix matching for invites with email case insensitively +- Tweak migrate_dynamo command -v9.1.46 (2024-03-13) -------------------------- - * Reduce WA template sync error logging to ignore those in http logs +## v9.3.24 (2024-08-20) -v9.1.45 (2024-03-12) -------------------------- - * Fix the size limit for contact exports +- Add dynamo table prefix setting -v9.1.44 (2024-03-12) -------------------------- - * Drop old export models and assets app +## v9.3.23 (2024-08-20) -v9.1.43 (2024-03-11) -------------------------- - * Data migration to delete old flow results exports - * Data migration to delete old msgs exports +- Add management command to create DynamoDB tables +- Add option for connection pooling -v9.1.42 (2024-03-11) -------------------------- - * Data migration to delete old 
contacts exports +## v9.3.22 (2024-08-19) -v9.1.41 (2024-03-11) -------------------------- - * Mark templates with button URLs and attachment in header not supported - * Convert exports to use shared export modal view +- Drop APIToken.role field -v9.1.40 (2024-03-08) -------------------------- - * Allow more WhatsApp templates to be usable in the flows +## v9.3.21 (2024-08-19) -v9.1.39 (2024-03-07) -------------------------- - * Updated editor with sendmsg update fix - * Improve contact export modal and use mailroom endpoint to know how many contacts will be exported +- Use correct URL when breaking spa-container +- Delete API tokens when user deleted and use generate_secret to create new tokens +- Update API token management UI to support multiple tokens -v9.1.38 (2024-03-07) -------------------------- - * Updated component button rendering +## v9.3.20 (2024-08-14) -v9.1.37 (2024-03-07) -------------------------- - * Do not sync templates for channels on suspended orgs or inactive orgs - * Redact WA password config in HTTP logs +- Rework S3 code to always use real S3 clients, even in tests -v9.1.36 (2024-03-06) -------------------------- - * Bump spec version to 13.4 - * Update editor to support template components +## v9.3.19 (2024-08-14) -v9.1.35 (2024-03-06) -------------------------- - * Restrict exports of contact groups that are too big - * Redact auth tokens from http logs when fetching whatsapp templates - * Cleanup code for fetching whatsapp templates and only create incidents after 5 failures - * Add data migration to delete old ticket exports +- Fix DTOne formax section +- Change default settings to use minio for file storage -v9.1.34 (2024-03-04) -------------------------- - * Update floweditor +## v9.3.18 (2024-08-13) -v9.1.33 (2024-03-04) -------------------------- - * Bump current flow spec version to 13.3 - * Ensure incidents are ended when releasing a channel +- Record when API tokens were last used +- Only support import contacts using .xlsx files with openpyxl -v9.1.32 (2024-03-04) -------------------------- - * Update temba-components - * Always send verification email with branding of current org - * Add incident for WhatsApp templates sync failed +## v9.3.17 (2024-08-12) -v9.1.31 (2024-02-28) -------------------------- - * Fix editing user when language is not an option +- Data migration to delete old surveyor and prometheus API tokens -v9.1.30 (2024-02-28) -------------------------- - * Hide UI language options when there aren't any - * Update test_db templates +## v9.3.16 (2024-08-08) -v9.1.29 (2024-02-27) -------------------------- - * Remove DS from available channel and only accessible to beta group - * Prevent further creation of surveyor users since that functionality no longer works +- Stop generating prometheus API tokens +- Drop Ticket.body -v9.1.28 (2024-02-22) -------------------------- - * Store servicing flag in session to avoid needing user orgs in context processor - * Add select_related to user loading for sessions and API tokens - * Bump cryptography from 42.0.2 to 42.0.4 +## v9.3.15 (2024-08-08) -v9.1.27 (2024-02-21) -------------------------- - * Update floweditor +- Add Org.prometheus_token and backfill from API tokens -v9.1.26 (2024-02-18) -------------------------- - * Bump cryptography from 42.0.0 to 42.0.2 - * Improve the form for setting flow SMTP and make reusable +## v9.3.14 (2024-08-08) -v9.1.25 (2024-02-14) -------------------------- - * Update temba-components +- Update tests to not set ticket body +- Add data migration to move body to
ticket on open ticket event -v9.1.24 (2024-02-12) -------------------------- - * Use dict for flow type icons instead of nested if elses - * Simplify export finished notification emails - * Use Org.Export for flows results exports +## v9.3.13 (2024-08-08) -v9.1.23 (2024-02-09) -------------------------- - * Fix org avatar scale for menu - * Fix widget for user avatar +- Show notes on ticket open events in contact history +- Remove body from ticket endpoint documentation +- Update floweditor which now also refers to ticket body as note +- Update open ticket modal to use note instead of body +- Add cutoff date for using viewer role -v9.1.22 (2024-02-08) -------------------------- - * Fix croppie dependency - * Prefetch user settings on users endpoint +## v9.3.12 (2024-08-07) -v9.1.21 (2024-02-08) -------------------------- - * Make user settings one to one +- Don't create surveyor user in mailroom test db +- Add warning to manage accounts page if org has viewers +- Remove viewers as an org feature, only allow existing viewer users to remain as viewers +- Update to latest Django -v9.1.20 (2024-02-08) -------------------------- - * Use orgs.Export for messages exports - * Simplify sending template emails - * Add new endpoint to internal API for templates - * Trim old export and notifications - * Add support for user avatars +## v9.3.11 (2024-08-07) -v9.1.19 (2024-02-07) -------------------------- - * Save transformed components for WA templates +- Remove Org.surveyor_password and always disable creating surveyor flows +- Remove non-modal response support from export translation view +- Remove surveyor user role and test user -v9.1.18 (2024-02-06) -------------------------- - * Cleanup flow SMTP formax and show parent settings as default to match mailroom changes - * Remove old code for saving SMTP into org config +## v9.3.10 (2024-08-07) -v9.1.17 (2024-02-06) -------------------------- - * Data migration to backfill Org.flow_smtp +- Remove surveyor users from workspaces -v9.1.16 (2024-02-06) -------------------------- - * Add new dedicated Org.flow_smtp field for email settings +## v9.3.9 (2024-08-07) -v9.1.15 (2024-02-06) -------------------------- - * Bump cryptography from 41.0.7 to 42.0.0 - * Simplify getting default flow email address +- Fix incidents templates name +- Let Ticket.body be null and make note length match contact note length -v9.1.14 (2024-01-30) -------------------------- - * Remove using readonly DB connection for fetching groups and fields +## v9.3.8 (2024-08-06) -v9.1.13 (2024-01-29) -------------------------- - * Simplify how we check for existing running exports - * Dta migration to mark old notifications as seen - * Improve export download page - * Allow marking all notifications as read by DELETE request to notifications endpoint - * Use orgs.Export for contact exports +- Show tabs on tickets when contact is set -v9.1.12 (2024-01-23) -------------------------- - * Tweak mailgun channel claiming +## v9.3.7 (2024-08-06) -v9.1.11 (2024-01-18) -------------------------- - * Some cleanup to new exports framework +- Add contact notes ui -v9.1.10 (2024-01-18) -------------------------- - * Add skeleton staff only mailgun channel type - * Add export download view +## v9.3.6 (2024-08-06) -v9.1.7 (2024-01-18) -------------------------- - * Update temba-components - * Save storage path on exports and fix ticket exports not having a download URL +- Adjust the grant view for new UI +- Fix Android claim page +- Add incident for Android client app version out of date +- Tweak 
fail_old_messages to only fail Android messages and add an index -v9.1.6 (2024-01-18) -------------------------- - * Add new generic orgs.Export model and replace ExportTicketsTask - * Simplify messaging when export is started +## v9.3.5 (2024-07-31) -v9.1.5 (2024-01-15) -------------------------- - * Allow webchat channels to have new convo triggers - * Finished exports should record number of items exported +- Support FCM changes +- Require E164 phone numbers for contacts created from UI -v9.1.4 (2024-01-12) -------------------------- - * Add skeleton temba chat channel type +## v9.3.4 (2024-07-30) -v9.1.3 (2024-01-12) -------------------------- - * Add notification for flow exports +- Add contact notes and expose over contacts API endpoint -v9.1.2 (2024-01-11) -------------------------- - * Fix issue with completion input focus +## v9.3.3 (2024-07-29) -v9.1.1 (2024-01-11) -------------------------- - * Update notification text +- Clamp messages on message views to one line +- Adjust max length for AT API key +- Make 'New Field' a button -v9.1.0 (2024-01-11) -------------------------- - * Add notifications to UI - * Fix test_db command - * Update stable versions in README +## v9.3.2 (2024-07-29) -v9.0.0 (2024-01-05) -------------------------- - * Test against mailroom v9 - * Replace dummy migrations with real squashed migrations +- Allow deleting of empty ticket topics +- Add support for buttons in side menu and use where appropriate -v8.3.123 (2024-01-05) -------------------------- - * Add empty versions of squashed migrations +## v9.3.0 (2024-07-25) -v8.3.122 (2024-01-04) -------------------------- - * Update to latest editor +- Add User.get_by_email to ensure consistent behaviour where we look up a user by their email +- Omnibox fixes and cleanup -v8.3.121 (2024-01-04) -------------------------- - * Update to latest floweditor with open ticket changes +## v9.2.5 (2024-07-24) -v8.3.120 (2024-01-03) -------------------------- - * Allow ticket body to be optional +- Ensure that emails are consistently treated as case insensitive -v8.3.119 (2024-01-03) -------------------------- - * Drop ticketer model +## v9.2.4 (2024-07-23) -v8.3.118 (2024-01-03) -------------------------- - * Remove view of http logs by ticketer - * Drop Ticket.ticketer and HTTPLog.ticketer +- Simplify FCM config setting names -v8.3.117 (2024-01-03) -------------------------- - * Remove ticketer types +## v9.2.3 (2024-07-23) -v8.3.116 (2024-01-03) -------------------------- - * Fix editor routing edge case - * Remove ticketers API endpoint +- More updates to WhatsApp claiming -v8.3.115 (2024-01-02) -------------------------- - * Update to latest flow editor - * Drop index on ticket.external_id +## v9.2.2 (2024-07-23) -v8.3.114 (2024-01-02) -------------------------- - * Stop exposing ticket ticketer on endpoints +- Fix WhatsApp embedded signup -v8.3.113 (2024-01-02) -------------------------- - * Update temba-components - * Finish cleaning up API v2 tests to use APITestMixin +## v9.2.1 (2024-07-18) -v8.3.112 (2023-12-14) -------------------------- - * ContactChat with less padding +- Catch errors from xlrd reading import rows and return errors with row numbers +- Update xlrd +- Honor meta key keyboard press inside contact chat -v8.3.111 (2023-12-14) -------------------------- - * Introduce footer +## v9.2.0 (2024-07-17) -v8.3.110 (2023-12-13) -------------------------- - * Add index to help fetching scheduled event fires and another to find template translations by channel+external id +- Simplify permissions in flows 
app +- Tweak menu items for msg views and flow results -v8.3.109 (2023-12-13) -------------------------- - * Move last indexews from SQL file into Django models and drop unused +## v9.1.198 (2024-07-17) -v8.3.108 (2023-12-12) -------------------------- - * Move all remaining flowrun and flowsession indexes onto their models +- Allow template image variables to be text with expressions -v8.3.107 (2023-12-12) -------------------------- - * Fix channel log display when missing URN - * Queued message treatment, flow editor fix - * Update poetry deps - * Move more indexes onto models and remove unnecessary one +## v9.1.196 (2024-07-16) -v8.3.106 (2023-12-11) -------------------------- - * Cleanup indexes for FlowStartCount, SystemLabelCount and ContactGroupCount - * Use datetime timezone aliased as tzone - * Update django timezone field to 6.1.0 +- Add `__repr__` to more models and tweak existing ones for consistency +- Fix rendering of flow starts for deleted flows +- Add data migration to trim old broadcasts to nodes that resulted in very large contact lists -v8.3.105 (2023-12-07) -------------------------- - * Email changes should reset email status to unverified +## v9.1.195 (2024-07-16) -v8.3.104 (2023-12-07) -------------------------- - * Remove duplication between channel read and chart views - * Cleanup indexes in channels app - * Remove unhelpful index on eventfire and move other into Django model +- Remove special error handling for broadcast to node that resolves to no recipients +- Fix setting a template on a new broadcast +- Fix query broadcast creation and update +- Add rendering of exclusions on broadcasts +- Fix not showing query on broadcast recipients list and add node_uuid -v8.3.103 (2023-12-05) -------------------------- - * Data migration to fix bad last seen on values - * Add support for user to start the email verification and send themselves the verification link +## v9.1.194 (2024-07-15) -v8.3.102 (2023-11-30) -------------------------- - * Testing auto-versioning again +- Add Broadcast.node_uuid field +- Remove old code for getting message created_by from broadcasts +- Make some exception clauses more specific -v8.3.99 (2023-11-29) -------------------------- - * Fix syncing OTP utility templates - * Drop unused TemplateTranslate.language and country fields +## v9.1.193 (2024-07-15) -v8.3.98 (2023-11-29) -------------------------- - * Fix mailroom DB templates components structure - * Bump cryptography from 41.0.4 to 41.0.6 - * Stop writing TemplateTranslation.language and country and remove unsupported language as a possibility +- Replace TemplateTranslation.STATUS_UNSUPPORTED completely -v8.3.97 (2023-11-28) -------------------------- - * Stop reading from TemplateTranslation.language and country - * Undocument the templates API endpoint and add locale field to translations - * Fix syncing OTP utility templates +## v9.1.192 (2024-07-15) -v8.3.96 (2023-11-27) -------------------------- - * Migration to backfill TemplateTranslation.locale and external_locale +- Add new template statuses and stop using fake "unsupported" status -v8.3.95 (2023-11-27) -------------------------- - * Add TemplateTranslation.locale and .external_locale to replace language and country - * Support saving components and params to message templates +## v9.1.191 (2024-07-15) -v8.3.94 (2023-11-23) -------------------------- - * Update temba-components +- Fix deactivating a legacy WhatsApp channel +- Update format of templates on API endpoint +- Show template translation problems as errors on template
read page -v8.3.93 (2023-11-23) -------------------------- - * Fix IVR simulation +## v9.1.190 (2024-07-12) -v8.3.92 (2023-11-22) -------------------------- - * Tweak appearance of API explorer +- Fix padding for broadcast schedule update -v8.3.91 (2023-11-21) -------------------------- - * Cleanup API docs +## v9.1.189 (2024-07-12) -v8.3.90 (2023-11-17) -------------------------- - * Add pillow dependency +- Fix mailroom_db +- Data migration to populate TemplateTranslation.is_supported and is_compatible -v8.3.89 (2023-11-15) -------------------------- - * Don't allow oeverwriting of flows with a different type during imports - * Enforce unique addresses for more channel types +## v9.1.188 (2024-07-12) -v8.3.88 (2023-11-14) -------------------------- - * Expose org.input_collation on languages formax - * Remove blog redirect pattern and sitemap - * Add unique_address to channel type and use that to validate channel is unique before claiming it +- Add new boolean fields to TemplateTranslation model to determine whether it's usable -v8.3.87 (2023-11-13) -------------------------- - * Data migration to delete schedules attached to deleted triggers - * Simulator should use workspace collation setting - * Don't include email only notifications in unseen count for UI +## v9.1.187 (2024-07-12) -v8.3.86 (2023-11-13) -------------------------- - * Update mailroom endpoint names +- Add templates to broadcasts -v8.3.85 (2023-11-10) -------------------------- - * Data migration to pause schedules of existing archived triggers +## v9.1.186 (2024-07-11) -v8.3.84 (2023-11-09) -------------------------- - * Allow schedules to be paused when triggers are archived +- Fix handling of POSTs to API docs +- Exclude empty templates from list, and show base translation apart on read page +- Ensure we choose a new base for a template whenever an existing base translation is deleted -v8.3.83 (2023-11-09) -------------------------- - * Fix login redirection to next param - * Drop no longer used fields on Schedule and Label - * Overrride mailroom URL in mailroom_db command - * Add view to verify email +## v9.1.185 (2024-07-11) -v8.3.82 (2023-11-08) -------------------------- - * Ensure that schedules are actually deleted when a broadcast or trigger is soft deleted - * Fix trigger list keyword search - * Make Notifications.medium non-null and use to filter notifications on API endpoint - * Make deprecated fields o schedule nullable - * Remove unused ScheduleCRUDL +- Update deps +- Replace telegram library by requests use +- Fix dashboard menu link permission +- Expose Template.base_translation on API endpoint -v8.3.81 (2023-11-07) -------------------------- - * Add data migration to backfill Notification.medium - * Add data migration to actually delete inactive schedules +## v9.1.184 (2024-07-11) -v8.3.80 (2023-11-07) -------------------------- - * Fix constraint on Trigger to allow deleting of schedules - * Add medium field Notification to let us model notifications which should be email only +- Use dropdowns for location fields -v8.3.79 (2023-11-07) -------------------------- - * Add data migration to delete ended and orphaned schedules - * Remove no longer used flow_type field on queued flow starts +## v9.1.183 (2024-07-11) -v8.3.78 (2023-11-02) -------------------------- - * Update scheduled broadcast to send now +- Use dropdowns for location fields -v8.3.77 (2023-11-01) -------------------------- - * Move optins inside compose widget +## v9.1.182 (2024-07-10) -v8.3.76 (2023-11-01) -------------------------- - * 
Fix org start view when org isn't set - * Add data migration to remove scheduled triggers without a schedule and constraint to prevent new ones - * Fix not showing non-field errors on wizard forms +- Locations API endpoint should allow searching on the path +- Fix template syncing when channel gives us invalid template data -v8.3.75 (2023-10-31) -------------------------- - * Remove register "trigger" type - * Add user settings fields for email verification - * Update trigger type icons - * Allow staff to add users - * Add send broadcast and start flow bulk actions to contact group page +## v9.1.181 (2024-07-10) -v8.3.74 (2023-10-30) -------------------------- - * Update temba-components with attachment rendering +- Add Template.base_translation +- Fix dashboard workspace data +- Allow creation of contacts with non-active statuses -v8.3.73 (2023-10-30) -------------------------- - * Add quick replies to broadcasts +## v9.1.180 (2024-07-10) -v8.3.72 (2023-10-27) -------------------------- - * Make sure the missing external ID we make for D360 channels is truncated to 64 characters - * Un-gate optins - * Add support for Facebook login for business configurations - * Move API token formax to Account section +- Drop no longer used is_active field from TemplateTranslation +- Tweak wording on template list page +- Add db constraint to ensure contact status is valid -v8.3.71 (2023-10-26) -------------------------- - * Consistent brand references in templates +## v9.1.179 (2024-07-10) -v8.3.70 (2023-10-26) -------------------------- - * Merge pull request #4930 from nyaruka/use-org-brand-domain - * Remove brand link - * Replace all brand link with brand domain use +- Keep FCM ID in channel config when soft deleting the channel +- Stop using TemplateTranslation.is_active and make nullable -v8.3.69 (2023-10-26) -------------------------- - * Use org brand domain instead of link - * Update to use Facebook API v18.0 +## v9.1.178 (2024-07-09) -v8.3.67 (2023-10-26) -------------------------- - * Update revisions url +- Allow broadcast creation with zero matches -v8.3.66 (2023-10-25) -------------------------- - * Simplify brands +## v9.1.177 (2024-07-08) -v8.3.65 (2023-10-25) -------------------------- - * Fix and cleanup view for accepting invitations +- Hard delete remaining soft-deleted template translations -v8.3.64 (2023-10-25) -------------------------- - * Fix start views for agent users - * Allow agent users to access account settings page - * Move two factor views out of main menu and into the account view +## v9.1.176 (2024-07-08) -v8.3.63 (2023-10-23) -------------------------- - * Fix SendBroadcast action to work with localized compose +- Update Template to a TembaModel +- Hard delete template translations that no longer exist on the channel side -v8.3.62 (2023-10-23) -------------------------- - * Make Trigger.priority non-null and use for ordering +## v9.1.175 (2024-07-05) -v8.3.61 (2023-10-23) -------------------------- - * Add data migration to backfill Trigger.priority +- Make send_when optional when updating broadcasts -v8.3.60 (2023-10-23) -------------------------- - * Add Trigger.priority and start writing +## v9.1.174 (2024-07-05) -v8.3.59 (2023-10-20) -------------------------- - * Fix maxlength for campaign events and focus on compose +- Fix updating scheduled broadcasts +- Remove old unused code for queueing broadcasts -v8.3.58 (2023-10-19) -------------------------- - * Allow triggers to wrap +## v9.1.173 (2024-07-05) -v8.3.57 (2023-10-19) -------------------------- - * Update 
oxford template filter to allow different conjunctions - * Move all trigger type templates into their own folders - * Add data migration to merge compatible keyword triggers +- Add Msg.is_android field +- Add internal API endpoint for searching locations by level and name +- Remove option to send now on broadcast update -v8.3.56 (2023-10-18) -------------------------- - * Improve display of triggers on list pages - * Support multiple keywords per trigger in UI - * Fix WA legacy config page +## v9.1.172 (2024-07-04) -v8.3.55 (2023-10-17) -------------------------- - * Show urns properly for urn change events - * Use localized validation errors for import validation - * Support multi-keyword triggers in exports and imports +- Add templates to broadcasts (hidden for now) +- Remove deprecated broadcast.template_state field on mailroom queue payload -v8.3.54 (2023-10-17) -------------------------- - * Drop Trigger.keyword +## v9.1.171 (2024-07-03) -v8.3.53 (2023-10-17) -------------------------- - * Fix fetching of keywords across triggers when editing a flow +- Update payload for queueing a bradocast -v8.3.52 (2023-10-17) -------------------------- - * Stop writing Trigger.keyword +## v9.1.170 (2024-07-03) -v8.3.51 (2023-10-17) -------------------------- - * Only read from Trigger.keywords +- Remove no longer needed task to sync stale Android relayers +- Don't allow template localization +- Update dependencies -v8.3.50 (2023-10-16) -------------------------- - * Make ticketer nullable on ticket - * Convert tickets API endpoints to use CRUDL perms - * Make sure we show the issue icon on the flow list page +## v9.1.169 (2024-07-02) -v8.3.49 (2023-10-13) -------------------------- - * Add data migration to populate keywords on trigger - * Add localization to create broadcast wizard +- Use python 3.11.x +- Add Broadcast.template_variables +- Add new template list and read pages and remove old channel specific ones +- Fix globals list template -v8.3.47 (2023-10-12) -------------------------- - * Add Trigger.keywords and start writing - * Switch contacts API endpoints to use CRUDL perms - * Cleanup BroadcastCRUDL.Send which is now only for sending to a flow node - * Remove unused LabelCRUDL.List view - * Convert messages, media and label API endpoints to use CRUDL perms +## v9.1.168 (2024-06-28) -v8.3.46 (2023-10-11) -------------------------- - * Remove no longer needed deprecated options on definitions endpoint - * Replace orgs.org_api permission - * Drop no longer used fields on FlowRevision +- Don't sync classifiers in suspended orgs +- Fix empty contact search with query present -v8.3.45 (2023-10-10) -------------------------- - * Show exclusion groups on trigger list pages - * Fix updating keyword triggers for flows - * Make sure we display trigger channel if set - * Limit access to API explorer to editors and admins - * Convert resthook API endpoints to use CRUDL based permissions +## v9.1.167 (2024-06-28) -v8.3.44 (2023-10-06) -------------------------- - * Allow request optin if optins exist - * Fix blurb for opt-out trigger - * Remove last usages of FlowLabel.parent and FlowRevision.modifiy_by - * Switch optins, topics, ticketers and templates API endpoints to use CRUDL perms - * Replace brand specific flow users with a single system user +- Disallow empty recipient targeting +- Fix external links within spa container -v8.3.43 (2023-10-05) -------------------------- - * Update editor and components +## v9.1.166 (2024-06-27) -v8.3.42 (2023-10-05) -------------------------- - * Make channel 
on trigger forms clearable - * Prepare unused fields on FlowRevision for removal and change all models in flows app to use orgs.User - * Allow beta testers to access optin features - * Switch flows, flow_starts and runs API endpoints to use CRUDL permissions - * Add optional channel field to call triggers types that are based on channel activity +- Tweak logging for failure during classifier syncing +- Switch broadcast tests to use contact search -v8.3.41 (2023-10-04) -------------------------- - * Add optin as field to channelevents - * Allow perms to be made API specific so that we can limit agent access to the UI +## v9.1.165 (2024-06-27) -v8.3.40 (2023-10-03) -------------------------- - * Remove globals from agent store when missing permission - * Remove arst +- Rework remaining mailroom client methods +- Add unique constraint on template translations -v8.3.39 (2023-10-03) -------------------------- - * Fix compose clear on send - * Use more CRUDL perms with API endpoints +## v9.1.164 (2024-06-27) -v8.3.38 (2023-10-03) -------------------------- - * Remove completion from contact chat - * Do not recreate the events when the campaign is archived +- Add data migration to remove duplicate template translations -v8.3.37 (2023-10-02) -------------------------- - * Abstract functionality for triggers based on channel actvity into base classes - * API endpoint should default to CRUDL based permissions if permission not specified - * Update to use Facebook API v17 +## v9.1.163 (2024-06-27) -v8.3.36 (2023-09-29) -------------------------- - * Remove minutes label from channel chart - * Add workspace breakdown for dashboard +- Change template translation syncing to enforce uniqueness over channel+locale -v8.3.35 (2023-09-28) -------------------------- - * Update opt-in styling - * Fix generation of history events from messages with optins +## v9.1.162 (2024-06-27) -v8.3.34 (2023-09-28) -------------------------- - * Fix migration conflict +- Make templatetranslation locale non-null +- Add migration to release translations for released channels -v8.3.33 (2023-09-28) -------------------------- - * Fix rendering of optin triggers - * Completely remove channel alerts +## v9.1.161 (2024-06-27) -v8.3.32 (2023-09-27) -------------------------- - * Fix previous accidental merge to main to add optin import support - * Cleanup views accessing request org - * Add optin as option to broadcast create wizard +- Fix not releasing template translations when channel released -v8.3.30 (2023-09-27) -------------------------- - * Allow the target_urls of incident notifications to differ by type - * Use proper secret generation for recovery tokens and re-org code - * Fix task discover for legacy whatsapp channel type - * Implement channel disconnected alert as incident +## v9.1.160 (2024-06-27) -v8.3.29 (2023-09-26) -------------------------- - * Update editor to include opt-ins +- Fix creating scheduled broadcasts +- Tweak menu on campaign read page +- Update to latest smartmin -v8.3.28 (2023-09-26) -------------------------- - * Fix Contact Importss - * Rename old legacy channel types - * Add title to incident list page and tweak styling - * Implement email notifications for incidents - * Fix ticket squashable count models +## v9.1.159 (2024-06-26) + +- Simplify some button labels and make edit a button on contact read page +- Don't show empty contact filter list +- Rework more mailroom client methods to use models instead of primitives + +## v9.1.158 (2024-06-26) + +- Add day selection when doing flow start 
search +- Tweak mailroom_db to run on different port + +## v9.1.157 (2024-06-25) + +- Reorg of mailroom client +- Add Broadcast.exclusions + +## v9.1.156 (2024-06-24) + +- Change broadcast creation from UI to use mailroom + +## v9.1.155 (2024-06-24) + +- Fix WAC to addEventListener in OnSpload +- Fix horizontal scrolling for contacts list +- Add Broadcast.template + +## v9.1.154 (2024-06-21) + +- Fix z-index issue properly + +## v9.1.153 (2024-06-21) + +- Fix z-index issue with content menu and chat + +## v9.1.152 (2024-06-21) + +- Fix ticket switching bug + +## v9.1.151 (2024-06-21) + +- Update chat rendering + +## v9.1.148 (2024-06-20) + +- Fix Broadcast.create + +## v9.1.147 (2024-06-20) + +- Use mailroom to create broadcasts from API calls +- Use mailroom to send broadcasts to flow nodes + +## v9.1.146 (2024-06-17) + +- Don't clip footer when ticket history grows +- Fix migration to add uuid field to airtime transfers + +## v9.1.145 (2024-06-17) + +- Don't send forgot password email if one was sent in last 5 minutes +- Delete failed login records on successful password reset +- Make transer UUID unique field, use TembaUUIDMixin on model + +## v9.1.144 (2024-06-14) + +- Add pagination on channel templates page +- Add settings config for Android clients FCM config +- Remove pyfcm and use google auth library to send sync messages for FCM +- Create our own password recovery view + +## v9.1.143 (2024-06-12) + +- Update smartmin +- Delete recovery tokens when new ones are created or email changed +- Populate airtime transfer uuids + +## v9.1.142 (2024-06-12) + +- Add AirtimeTransfer.external_id +- Add data migration to cleanup template translations + +## v9.1.141 (2024-06-12) + +- Update to latest smartmin +- Add uuid field to airtime transfer model + +## v9.1.140 (2024-06-12) + +- Really actually fix template attachments for real + +## v9.1.139 (2024-06-11) + +- Fix split issue for template editor + +## v9.1.138 (2024-06-10) + +- Template editor fix for empty content +- Tweak component types to be header/_, body/_ etc +- Support Twilio media in templates + +## v9.1.137 (2024-06-10) + +- Support WhatsApp templates with header images +- Remove no longer used URN related code +- Generate email verification secret when account created, change when email changed + +## v9.1.136 (2024-06-07) + +- Add spa mixin to transfer logs views +- Allow editing TWA messaging service SID +- Lean on mailroom for URN validation during contact update +- Some tidy up of the update contact form + +## v9.1.135 (2024-06-05) + +- Fix login error message styling +- Remove unused JS libs + +## v9.1.134 (2024-06-05) + +- Contact API endpoint should let mailroom decide if a URN is taken +- Revert "Remove csrf token hidden element not under a form" + +## v9.1.133 (2024-06-05) + +- Fix API explorer POSTs +- Make CSRF cookie age 2 weeks and remove non-form hidden CSRF hidden elements + +## v9.1.132 (2024-06-04) + +- Make sure the CSRF element is present for all page header blocks + +## v9.1.131 (2024-05-31) + +- Fix DT One submit buttons + +## v9.1.130 (2024-05-31) + +- Fix flow and msgs unlabel action +- Remove no longer used params field on synched whatsapp type templates + +## v9.1.129 (2024-05-29) + +- Increase DATA_UPLOAD_MAX_NUMBER_FIELDS to 2500 +- Fix FB and IG claim getFBpages + +## v9.1.128 (2024-05-27) + +- Lean on mailroom for validation of phone numbers from android events / messages + +## v9.1.127 (2024-05-27) + +- Rework contact create view to let mailroom do URN validation + +## v9.1.126 (2024-05-24) + +- 
Mailroom client should use content-type header on responses to know whether to parse as JSON +- Ensure anon users can access API docs + +## v9.1.125 (2024-05-23) + +- Add csrf on hidden element + +## v9.1.124 (2024-05-22) + +- Rework handling of errors from mailroom client +- Update test db flows + +## v9.1.123 (2024-05-20) + +- Replace django messages rendering with toasts + +## v9.1.121 (2024-05-16) + +- Fix action to remove from group. +- Report bulk action errors to users with django messages + +## v9.1.120 (2024-05-16) + +- Remove old unused ES sorting code +- Update to latest smartmin and disable auto success messages +- Add data migration to fix system fields for existing orgs and start using is_proxy +- Reduce reserved keys for fields to bare minimum + +## v9.1.119 (2024-05-16) + +- Add ContactField.is_proxy and reduce SYSTEM_FIELDS to the two proxy date fields +- Don't use error level alerts for form errors + +## v9.1.118 (2024-05-15) + +- Remove unused args from MailroomClient.parse_query +- Re-add search errors to contact list views + +## v9.1.117 (2024-05-15) + +- Add support for unknown_property_type search errors +- Add support for twilio card type content templates +- Add way to view webhook logs errors only + +## v9.1.116 (2024-05-14) + +- Fix issues with twilio templates sync + +## v9.1.115 (2024-05-10) + +- Fix Twilio template type slug and register its template type + +## v9.1.114 (2024-05-10) + +- Add message templates menu for TWA channels +- Activate Twilio Whatsapp to sync templates with twilio type +- Update to allow matching sender ID as valid phones + +## v9.1.113 (2024-05-09) + +- Fix gaps it contact history + +## v9.1.112 (2024-05-09) + +- Ignore android msg/event cmds with non numeric phones + +## v9.1.111 (2024-05-08) + +- Send phone instead of urn to mailroom android endpoints +- Add Twilio content template type, and TWA fetch_templates + +## v9.1.110 (2024-05-08) + +- Remove messages block that duplicates alert-messages +- Tweak DefinitionExport.name for consistency + +## v9.1.109 (2024-05-07) + +- Tweak export finished emails so they don't say Excel + +## v9.1.108 (2024-05-07) + +- Update temba-components to 0.86.1 +- Change flow definitions export to be async, use new export type + +## v9.1.107 (2024-05-07) + +- Fix variable name in http log read page +- Fix claiming instagram + +## v9.1.106 (2024-05-06) + +- Fix globals API endpoint + +## v9.1.105 (2024-05-03) + +- Fix race condition on editor load + +## v9.1.104 (2024-05-03) + +- Fix template bug and loading error for editor + +## v9.1.103 (2024-05-02) + +- Fix contact field selection + +## v9.1.102 (2024-05-02) + +- Delete all sessions and runs in org deletion in batches +- Tiny style change for loader wrapping on editor + +## v9.1.101 (2024-05-01) + +- Update editor and flow spec version + +## v9.1.100 (2024-04-29) + +- Tweak time limit for sessions to 89 days so things are always interrupted before archiver gets to them +- Cleanup API endpoint docs + +## v9.1.99 (2024-04-26) + +- Remove elastic search +- Add support for read msg status + +## v9.1.98 (2024-04-25) + +- Fix ticket status selection + +## v9.1.97 (2024-04-25) + +- Include url for org chooser + +## v9.1.96 (2024-04-25) + +- Remove jQuery + +## v9.1.95 (2024-04-25) + +- Change ordering of non-search based exports to be id to match search based +- Use mailroom endpoint for search based contact exports +- Remove cancel button from contact import page and remove duplicate styles +- Tweak layout of user edit form +- Email notification that 
account email has changed should include the new email address + +## v9.1.94 (2024-04-24) + +- Fix changing password so user isn't logged out +- Fix user edit form allowing insecure passwords + +## v9.1.93 (2024-04-24) + +- Add notification types for when email or password is changed +- Expire unaccepted invitations after 30 days +- Move invitation form into modal + +## v9.1.92 (2024-04-23) + +- Remove start url for surveyors and instead do login redirect +- Fix to disallow content type vs extension mismatching for media uploads +- Fix to limit sending user verification email to 1 per 10 minutes +- Remove warning for flows that don't specify Facebook topic + +## v9.1.91 (2024-04-18) + +- Fix select race +- Fix header matching +- Simplify URL for template list page + +## v9.1.90 (2024-04-16) + +- Fix race on initial load for select and tabs + +## v9.1.89 (2024-04-16) + +- Fix API docs scrolling +- Fix mailroom_db data file +- Simplify channel claim page styling and remove unused styles +- Add Msg.templating + +## v9.1.88 (2024-04-15) + +- Drop FlowRun.submitted_by and cleanup superfulous constants +- Make whatsapp template type an actual package +- Simplify page titles so section isn't repeated in title + +## v9.1.87 (2024-04-12) + +- Add inline attachment style and wrapping on logs +- Don't re-release released triggers + +## v9.1.86 (2024-04-12) + +- Prune unnecessary styles, move to heavier fonts + +## v9.1.85 (2024-04-12) + +- Drop support for Submitted By in results exports +- Add constraint to limit Msg.DIRECTION to I or O +- Add constraint to incoming messages have channel and URN + +## v9.1.83 (2024-04-11) + +- Add TemplateType and rework whatsapp to be a type +- Remove special treatment for exports of surveyor flows +- Add TemplateTranslation.variables + +## v9.1.82 (2024-04-10) + +- Unpublicize the channel events API endpoint +- Drop unused Msg.queued_on field + +## v9.1.81 (2024-04-10) + +- Update temba-components + +## v9.1.80 (2024-04-10) + +- Assume js is pre-minified + +## v9.1.79 (2024-04-09) + +- Update flow editor + +## v9.1.78 (2024-04-09) + +- Use new components bundle + +## v9.1.77 (2024-04-09) + +- Deprecate Msg.queued_on as it isn't used and make Msg.modified_on non-null + +## v9.1.76 (2024-04-08) + +- Add data migration to backfill missing user settings +- Add signal receiver to ensure new users always have settings + +## v9.1.75 (2024-04-04) + +- Add data migration to archive campaigns with deleted groups +- Fix rendering of campaigns with deleted groups +- Improve styling on template list page + +## v9.1.74 (2024-04-04) + +- Update temba-components +- Use timedate formatting for last_seen_on / created_on on contact list pages +- Remove unused BRAND properties +- Cleanup displaying of channel name, address and type + +## v9.1.73 (2024-04-03) + +- Make Channel.name non-null and remove unused channel list view +- Replace format_datetime and short_datetime tags with day or datetime filters + +## v9.1.72 (2024-04-03) + +- Update temba-components +- Add data migration to backfill empty channel names +- Ensure Android channels get a default name when registering + +## v9.1.71 (2024-04-03) + +- Ignore empty messages from Android relayers + +## v9.1.70 (2024-04-03) + +- Update flow editor +- Remove unused option on assets endpoint to return environment + +## v9.1.69 (2024-04-02) + +- Remove no longer used template tag as_icon +- Fix export blocking due to multiple users exporting at same time +- Switch formax to expand vertically +- Add ChannelEvent.status field and prevent 
creating channel events of unknown types from Android syncs + +## v9.1.68 (2024-04-02) + +- Use mailroom endpoints to create messages and events during Android syncing +- Drop support for returning template components as dict + +## v9.1.67 (2024-04-01) + +- Update template editor to work with comps as list +- Add task to trim old channel events + +## v9.1.66 (2024-03-28) + +- Update format of tasks queued to mailroom + +## v9.1.65 (2024-03-28) + +- Update to django 5.0 and DRF 3.15.1 + +## v9.1.64 (2024-03-25) + +- Tweak menu styling + +## v9.1.63 (2024-03-22) + +- Add open tab event + +## v9.1.62 (2024-03-22) + +- Make workspace selection use common event pattern +- Truncate long template name to not break the page +- Replace iso630 with iso639-lang package +- Fix non Django 5 compatible code + +## v9.1.61 (2024-03-21) + +- Support for menu events + +## v9.1.60 (2024-03-21) + +- Update to latest ruff, isort and djlint +- Drop TemplateTranslation.comps_as_dict +- Get rid of channel typed owned sync log views and use new channel view on HTTP log CRUDL +- Convert templates views to actual CRUDL and fix permissions + +## v9.1.59 (2024-03-21) + +- Move template code into templates app +- Stop writing TemplateTranslation.comps_as_dict + +## v9.1.58 (2024-03-20) + +- Some fixes for on-device mobile issues +- Allow returning of components in list format from API endpoint +- Update to latest black +- Don't try to extract parameters from template url button component display values + +## v9.1.57 (2024-03-20) + +- Add name field also to template components +- Tweak template list page to use components list instead of comps_as_dict + +## v9.1.56 (2024-03-19) + +- Save TemplateTranslation.components as list, use comps_as_dict for API endpoint + +## v9.1.55 (2024-03-19) + +- Add temporary TemplateTranslation.comps_as_dict field + +## v9.1.54 (2024-03-19) + +- Add type to template components +- Remove deprecated fields from template translations + +## v9.1.53 (2024-03-18) + +- Fix mobile notice + +## v9.1.52 (2024-03-18) + +- Don't migrate flows when listing campaign events + +## v9.1.51 (2024-03-17) + +- Tweaks to make the interface more mobile friendly + +## v9.1.50 (2024-03-17) + +- Better feedback when editing contact fields + +## v9.1.49 (2024-03-15) + +- Add url param type for buttons with URLs + +## v9.1.48 (2024-03-14) + +- Show more components for WA templates list +- Add display to WA templates button components + +## v9.1.47 (2024-03-14) + +- Remove old templates API endpoint +- Update flow version for campaigns events single message flows + +## v9.1.46 (2024-03-13) + +- Reduce WA template sync error logging to ignore those in http logs + +## v9.1.45 (2024-03-12) + +- Fix the size limit for contact exports + +## v9.1.44 (2024-03-12) + +- Drop old export models and assets app + +## v9.1.43 (2024-03-11) + +- Data migration to delete old flow results exports +- Data migration to delete old msgs exports + +## v9.1.42 (2024-03-11) + +- Data migration to delete old contacts exports + +## v9.1.41 (2024-03-11) + +- Mark templates with button URLs and attachment in header not supported +- Convert exports to use shared export modal view + +## v9.1.40 (2024-03-08) + +- Allow more WhatsApp templates to be usable in the flows + +## v9.1.39 (2024-03-07) + +- Updated editor with sendmsg update fix +- Improve contact export modal and use mailroom endpoint to know how many contacts will be exported + +## v9.1.38 (2024-03-07) + +- Updated component button rendering + +## v9.1.37 (2024-03-07) + +- Do not sync 
templates for channels on suspended orgs or inactive orgs +- Redact WA password config in HTTP logs + +## v9.1.36 (2024-03-06) + +- Bump spec version to 13.4 +- Update editor to support template components + +## v9.1.35 (2024-03-06) + +- Restrict exports of contact groups that are too big +- Redact auth tokens from http logs when fetching whatsapp templates +- Cleanup code for fetching whatsapp templates and only create incidents after 5 failures +- Add data migration to delete old ticket exports + +## v9.1.34 (2024-03-04) + +- Update floweditor + +## v9.1.33 (2024-03-04) + +- Bump current flow spec version to 13.3 +- Ensure incidents are ended when releasing a channel + +## v9.1.32 (2024-03-04) + +- Update temba-components +- Always send verification email with branding of current org +- Add incident for WhatsApp templates sync failed + +## v9.1.31 (2024-02-28) + +- Fix editing user when language is not an option + +## v9.1.30 (2024-02-28) + +- Hide UI language options when there aren't any +- Update test_db templates + +## v9.1.29 (2024-02-27) + +- Remove DS from available channel and only accessible to beta group +- Prevent further creation of surveyor users since that functionality no longer works + +## v9.1.28 (2024-02-22) + +- Store servicing flag in session to avoid needing user orgs in context processor +- Add select_related to user loading for sessions and API tokens +- Bump cryptography from 42.0.2 to 42.0.4 + +## v9.1.27 (2024-02-21) + +- Update floweditor + +## v9.1.26 (2024-02-18) + +- Bump cryptography from 42.0.0 to 42.0.2 +- Improve the form for setting flow SMTP and make reusable + +## v9.1.25 (2024-02-14) + +- Update temba-components + +## v9.1.24 (2024-02-12) + +- Use dict for flow type icons instead of nested if elses +- Simplify export finished notification emails +- Use Org.Export for flows results exports + +## v9.1.23 (2024-02-09) + +- Fix org avatar scale for menu +- Fix widget for user avatar + +## v9.1.22 (2024-02-08) + +- Fix croppie dependency +- Prefetch user settings on users endpoint + +## v9.1.21 (2024-02-08) + +- Make user settings one to one + +## v9.1.20 (2024-02-08) + +- Use orgs.Export for messages exports +- Simplify sending template emails +- Add new endpoint to internal API for templates +- Trim old export and notifications +- Add support for user avatars + +## v9.1.19 (2024-02-07) + +- Save transformed components for WA templates + +## v9.1.18 (2024-02-06) + +- Cleanup flow SMTP formax and show parent settings as default to match mailroom changes +- Remove old code for saving SMTP into org config + +## v9.1.17 (2024-02-06) + +- Data migration to backfill Org.flow_smtp + +## v9.1.16 (2024-02-06) + +- Add new dedicated Org.flow_smtp field for email settings + +## v9.1.15 (2024-02-06) + +- Bump cryptography from 41.0.7 to 42.0.0 +- Simplify getting default flow email address + +## v9.1.14 (2024-01-30) + +- Remove using readonly DB connection for fetching groups and fields + +## v9.1.13 (2024-01-29) + +- Simplify how we check for existing running exports +- Dta migration to mark old notifications as seen +- Improve export download page +- Allow marking all notifications as read by DELETE request to notifications endpoint +- Use orgs.Export for contact exports + +## v9.1.12 (2024-01-23) + +- Tweak mailgun channel claiming + +## v9.1.11 (2024-01-18) + +- Some cleanup to new exports framework + +## v9.1.10 (2024-01-18) + +- Add skeleton staff only mailgun channel type +- Add export download view + +## v9.1.7 (2024-01-18) + +- Update temba-components +- 
Save storage path on exports and fix ticket exports not having a download URL + +## v9.1.6 (2024-01-18) + +- Add new generic orgs.Export model and replace ExportTicketsTask +- Simplify messaging when export is started + +## v9.1.5 (2024-01-15) + +- Allow webchat channels to have new convo triggers +- Finished exports should record number of items exported + +## v9.1.4 (2024-01-12) + +- Add skeleton temba chat channel type + +## v9.1.3 (2024-01-12) + +- Add notification for flow exports + +## v9.1.2 (2024-01-11) + +- Fix issue with completion input focus + +## v9.1.1 (2024-01-11) + +- Update notification text + +## v9.1.0 (2024-01-11) + +- Add notifications to UI +- Fix test_db command +- Update stable versions in README + +## v9.0.0 (2024-01-05) + +- Test against mailroom v9 +- Replace dummy migrations with real squashed migrations + +## v8.3.123 (2024-01-05) + +- Add empty versions of squashed migrations + +## v8.3.122 (2024-01-04) + +- Update to latest editor + +## v8.3.121 (2024-01-04) + +- Update to latest floweditor with open ticket changes + +## v8.3.120 (2024-01-03) + +- Allow ticket body to be optional + +## v8.3.119 (2024-01-03) + +- Drop ticketer model + +## v8.3.118 (2024-01-03) + +- Remove view of http logs by ticketer +- Drop Ticket.ticketer and HTTPLog.ticketer + +## v8.3.117 (2024-01-03) + +- Remove ticketer types + +## v8.3.116 (2024-01-03) + +- Fix editor routing edge case +- Remove ticketers API endpoint + +## v8.3.115 (2024-01-02) + +- Update to latest flow editor +- Drop index on ticket.external_id + +## v8.3.114 (2024-01-02) + +- Stop exposing ticket ticketer on endpoints + +## v8.3.113 (2024-01-02) + +- Update temba-components +- Finish cleaning up API v2 tests to use APITestMixin + +## v8.3.112 (2023-12-14) + +- ContactChat with less padding + +## v8.3.111 (2023-12-14) + +- Introduce footer + +## v8.3.110 (2023-12-13) + +- Add index to help fetching scheduled event fires and another to find template translations by channel+external id + +## v8.3.109 (2023-12-13) + +- Move last indexews from SQL file into Django models and drop unused + +## v8.3.108 (2023-12-12) + +- Move all remaining flowrun and flowsession indexes onto their models + +## v8.3.107 (2023-12-12) + +- Fix channel log display when missing URN +- Queued message treatment, flow editor fix +- Update poetry deps +- Move more indexes onto models and remove unnecessary one + +## v8.3.106 (2023-12-11) + +- Cleanup indexes for FlowStartCount, SystemLabelCount and ContactGroupCount +- Use datetime timezone aliased as tzone +- Update django timezone field to 6.1.0 + +## v8.3.105 (2023-12-07) + +- Email changes should reset email status to unverified + +## v8.3.104 (2023-12-07) + +- Remove duplication between channel read and chart views +- Cleanup indexes in channels app +- Remove unhelpful index on eventfire and move other into Django model + +## v8.3.103 (2023-12-05) + +- Data migration to fix bad last seen on values +- Add support for user to start the email verification and send themselves the verification link + +## v8.3.102 (2023-11-30) + +- Testing auto-versioning again + +## v8.3.99 (2023-11-29) + +- Fix syncing OTP utility templates +- Drop unused TemplateTranslate.language and country fields + +## v8.3.98 (2023-11-29) + +- Fix mailroom DB templates components structure +- Bump cryptography from 41.0.4 to 41.0.6 +- Stop writing TemplateTranslation.language and country and remove unsupported language as a possibility + +## v8.3.97 (2023-11-28) + +- Stop reading from TemplateTranslation.language and 
country +- Undocument the templates API endpoint and add locale field to translations +- Fix syncing OTP utility templates + +## v8.3.96 (2023-11-27) + +- Migration to backfill TemplateTranslation.locale and external_locale + +## v8.3.95 (2023-11-27) + +- Add TemplateTranslation.locale and .external_locale to replace language and country +- Support saving components and params to message templates + +## v8.3.94 (2023-11-23) + +- Update temba-components + +## v8.3.93 (2023-11-23) + +- Fix IVR simulation + +## v8.3.92 (2023-11-22) + +- Tweak appearance of API explorer + +## v8.3.91 (2023-11-21) + +- Cleanup API docs + +## v8.3.90 (2023-11-17) + +- Add pillow dependency + +## v8.3.89 (2023-11-15) + +- Don't allow oeverwriting of flows with a different type during imports +- Enforce unique addresses for more channel types + +## v8.3.88 (2023-11-14) + +- Expose org.input_collation on languages formax +- Remove blog redirect pattern and sitemap +- Add unique_address to channel type and use that to validate channel is unique before claiming it + +## v8.3.87 (2023-11-13) + +- Data migration to delete schedules attached to deleted triggers +- Simulator should use workspace collation setting +- Don't include email only notifications in unseen count for UI + +## v8.3.86 (2023-11-13) + +- Update mailroom endpoint names + +## v8.3.85 (2023-11-10) + +- Data migration to pause schedules of existing archived triggers + +## v8.3.84 (2023-11-09) + +- Allow schedules to be paused when triggers are archived + +## v8.3.83 (2023-11-09) + +- Fix login redirection to next param +- Drop no longer used fields on Schedule and Label +- Overrride mailroom URL in mailroom_db command +- Add view to verify email + +## v8.3.82 (2023-11-08) + +- Ensure that schedules are actually deleted when a broadcast or trigger is soft deleted +- Fix trigger list keyword search +- Make Notifications.medium non-null and use to filter notifications on API endpoint +- Make deprecated fields o schedule nullable +- Remove unused ScheduleCRUDL + +## v8.3.81 (2023-11-07) + +- Add data migration to backfill Notification.medium +- Add data migration to actually delete inactive schedules + +## v8.3.80 (2023-11-07) + +- Fix constraint on Trigger to allow deleting of schedules +- Add medium field Notification to let us model notifications which should be email only + +## v8.3.79 (2023-11-07) + +- Add data migration to delete ended and orphaned schedules +- Remove no longer used flow_type field on queued flow starts + +## v8.3.78 (2023-11-02) + +- Update scheduled broadcast to send now + +## v8.3.77 (2023-11-01) + +- Move optins inside compose widget + +## v8.3.76 (2023-11-01) + +- Fix org start view when org isn't set +- Add data migration to remove scheduled triggers without a schedule and constraint to prevent new ones +- Fix not showing non-field errors on wizard forms + +## v8.3.75 (2023-10-31) + +- Remove register "trigger" type +- Add user settings fields for email verification +- Update trigger type icons +- Allow staff to add users +- Add send broadcast and start flow bulk actions to contact group page + +## v8.3.74 (2023-10-30) + +- Update temba-components with attachment rendering + +## v8.3.73 (2023-10-30) + +- Add quick replies to broadcasts + +## v8.3.72 (2023-10-27) + +- Make sure the missing external ID we make for D360 channels is truncated to 64 characters +- Un-gate optins +- Add support for Facebook login for business configurations +- Move API token formax to Account section + +## v8.3.71 (2023-10-26) + +- Consistent brand 
references in templates + +## v8.3.70 (2023-10-26) + +- Merge pull request #4930 from nyaruka/use-org-brand-domain +- Remove brand link +- Replace all brand link with brand domain use + +## v8.3.69 (2023-10-26) + +- Use org brand domain instead of link +- Update to use Facebook API v18.0 + +## v8.3.67 (2023-10-26) + +- Update revisions url + +## v8.3.66 (2023-10-25) + +- Simplify brands + +## v8.3.65 (2023-10-25) + +- Fix and cleanup view for accepting invitations + +## v8.3.64 (2023-10-25) + +- Fix start views for agent users +- Allow agent users to access account settings page +- Move two factor views out of main menu and into the account view + +## v8.3.63 (2023-10-23) + +- Fix SendBroadcast action to work with localized compose + +## v8.3.62 (2023-10-23) + +- Make Trigger.priority non-null and use for ordering + +## v8.3.61 (2023-10-23) + +- Add data migration to backfill Trigger.priority + +## v8.3.60 (2023-10-23) + +- Add Trigger.priority and start writing + +## v8.3.59 (2023-10-20) + +- Fix maxlength for campaign events and focus on compose + +## v8.3.58 (2023-10-19) + +- Allow triggers to wrap + +## v8.3.57 (2023-10-19) + +- Update oxford template filter to allow different conjunctions +- Move all trigger type templates into their own folders +- Add data migration to merge compatible keyword triggers + +## v8.3.56 (2023-10-18) + +- Improve display of triggers on list pages +- Support multiple keywords per trigger in UI +- Fix WA legacy config page + +## v8.3.55 (2023-10-17) + +- Show urns properly for urn change events +- Use localized validation errors for import validation +- Support multi-keyword triggers in exports and imports + +## v8.3.54 (2023-10-17) + +- Drop Trigger.keyword + +## v8.3.53 (2023-10-17) + +- Fix fetching of keywords across triggers when editing a flow + +## v8.3.52 (2023-10-17) + +- Stop writing Trigger.keyword + +## v8.3.51 (2023-10-17) + +- Only read from Trigger.keywords + +## v8.3.50 (2023-10-16) + +- Make ticketer nullable on ticket +- Convert tickets API endpoints to use CRUDL perms +- Make sure we show the issue icon on the flow list page + +## v8.3.49 (2023-10-13) + +- Add data migration to populate keywords on trigger +- Add localization to create broadcast wizard + +## v8.3.47 (2023-10-12) + +- Add Trigger.keywords and start writing +- Switch contacts API endpoints to use CRUDL perms +- Cleanup BroadcastCRUDL.Send which is now only for sending to a flow node +- Remove unused LabelCRUDL.List view +- Convert messages, media and label API endpoints to use CRUDL perms + +## v8.3.46 (2023-10-11) + +- Remove no longer needed deprecated options on definitions endpoint +- Replace orgs.org_api permission +- Drop no longer used fields on FlowRevision + +## v8.3.45 (2023-10-10) + +- Show exclusion groups on trigger list pages +- Fix updating keyword triggers for flows +- Make sure we display trigger channel if set +- Limit access to API explorer to editors and admins +- Convert resthook API endpoints to use CRUDL based permissions + +## v8.3.44 (2023-10-06) + +- Allow request optin if optins exist +- Fix blurb for opt-out trigger +- Remove last usages of FlowLabel.parent and FlowRevision.modifiy_by +- Switch optins, topics, ticketers and templates API endpoints to use CRUDL perms +- Replace brand specific flow users with a single system user + +## v8.3.43 (2023-10-05) + +- Update editor and components + +## v8.3.42 (2023-10-05) + +- Make channel on trigger forms clearable +- Prepare unused fields on FlowRevision for removal and change all models in flows app 
to use orgs.User +- Allow beta testers to access optin features +- Switch flows, flow_starts and runs API endpoints to use CRUDL permissions +- Add optional channel field to call triggers types that are based on channel activity + +## v8.3.41 (2023-10-04) + +- Add optin as field to channelevents +- Allow perms to be made API specific so that we can limit agent access to the UI + +## v8.3.40 (2023-10-03) + +- Remove globals from agent store when missing permission +- Remove arst + +## v8.3.39 (2023-10-03) + +- Fix compose clear on send +- Use more CRUDL perms with API endpoints + +## v8.3.38 (2023-10-03) + +- Remove completion from contact chat +- Do not recreate the events when the campaign is archived + +## v8.3.37 (2023-10-02) + +- Abstract functionality for triggers based on channel actvity into base classes +- API endpoint should default to CRUDL based permissions if permission not specified +- Update to use Facebook API v17 + +## v8.3.36 (2023-09-29) + +- Remove minutes label from channel chart +- Add workspace breakdown for dashboard + +## v8.3.35 (2023-09-28) + +- Update opt-in styling +- Fix generation of history events from messages with optins + +## v8.3.34 (2023-09-28) + +- Fix migration conflict + +## v8.3.33 (2023-09-28) + +- Fix rendering of optin triggers +- Completely remove channel alerts + +## v8.3.32 (2023-09-27) + +- Fix previous accidental merge to main to add optin import support +- Cleanup views accessing request org +- Add optin as option to broadcast create wizard + +## v8.3.30 (2023-09-27) + +- Allow the target_urls of incident notifications to differ by type +- Use proper secret generation for recovery tokens and re-org code +- Fix task discover for legacy whatsapp channel type +- Implement channel disconnected alert as incident + +## v8.3.29 (2023-09-26) + +- Update editor to include opt-ins + +## v8.3.28 (2023-09-26) + +- Fix Contact Importss +- Rename old legacy channel types +- Add title to incident list page and tweak styling +- Implement email notifications for incidents +- Fix ticket squashable count models + +## v8.3.27 (2023-09-25) + +- Tweak mailroom_db to create an FBA channel instead of a TWT channel +- Remove ticketers as a feature and the views for connecting external ticketers +- Re-add optin as distinct message type +- Add undocumented API endpoint for opt-ins + +## v8.3.26 (2023-09-22) + +- Bump cryptography from 41.0.3 to 41.0.4 +- Add optin field to Broadcast + +## v8.3.25 (2023-09-21) + +- Fix trigger ordering + +## v8.3.24 (2023-09-21) + +- Add opt-in and opt-out trigger types (staff only for now) +- Group keyword triggers and catch all triggers under a Messages folder +- Move broadcasts and scheduled to their own pages + +## v8.3.23 (2023-09-21) + +- Replace Msg.type=optin with optin reference on msg +- Group trigger types into folders +- Make sure staff can update the log policy on all channel types + +## v8.3.22 (2023-09-19) + +- Make ticketers API endpoint unpublicized +- Add 'Send Now' to broadcast creation + +## v8.3.21 (2023-09-18) + +- Add basic OptIn model +- Use env variable for dev mode host + +## v8.3.20 (2023-09-12) + +- Update editor for localized attachment fix + +## v8.3.19 (2023-09-12) + +- Add new data migration to fix IVR call counts +- Drop Channel.parent, ContactURN.auth and Org.input_cleaners +- Remove support for delegate channels + +## v8.3.18 (2023-09-07) + +- Add data migration to populate ContactURN.auth_tokens + +## v8.3.17 (2023-09-06) + +- Add ContactURN.auth_tokens to replace .auth + +## v8.3.16 (2023-09-06) + 
+- Tweak documentation for flow_starts endpoint
+- Allow agents to update ticket topics
+
+## v8.3.15 (2023-09-06)
+
+- Add hover-darker button option
+- Update icons
+
+## v8.3.14 (2023-08-31)
+
+- Limit loading to the recent 100 sessions
+- Disallow GET request for media upload view
+
+## v8.3.13 (2023-08-28)
+
+- Tweaks to the channel config blurbs for consistency
+- Fetching messages by label should include archived messages
+- Use secrets module instead of random for random_string
+- Little bit of cleanup in channel types like removing unused fields
+
+## v8.3.12 (2023-08-23)
+
+- Add ChannelType.config_ui to replace configuration_urls, configuration_blurb etc
+- Show Somleng config URLs based on channel role
+- Add Org.input_collation
+- Remove Blackmyna, Chikka, Junebug, Twitter legacy, old Zenvia channel types
+
+## v8.3.11 (2023-08-17)
+
+- Convert final haml templates in root directory
+
+## v8.3.10 (2023-08-17)
+
+- Add Org.input_cleaners
+- Always show name / anon id for anon orgs in contact lists
+- Don't let mailroom handle tasks during tests
+- Fix title on welcome page
+
+## v8.3.9 (2023-08-16)
+
+- Fix onSpload fire when initial page doesn't call it
+
+## v8.3.8 (2023-08-16)
+
+- Use $ instead of onSpload
+
+## v8.3.7 (2023-08-16)
+
+- Fix Javascript on claim number view
+- Switch test_db to assume a docker container
+
+## v8.3.6 (2023-08-15)
+
+- Convert haml templates in includes folder and utils app
+- Cleanup page titles in settings section
+
+## v8.3.5 (2023-08-14)
+
+- Convert haml templates in public and orgs apps
+
+## v8.3.4 (2023-08-14)
+
+- Convert templates in assets, channels, msgs, request_logs and schedules apps as well as overridden smartmin templates
+
+## v8.3.3 (2023-08-10)
+
+- Simplify message indexes and system label queries
+
+## v8.3.2 (2023-08-10)
+
+- Add data migration to convert old I/F msg types
+
+## v8.3.1 (2023-08-09)
+
+- Merge pull request #4779 from nyaruka/less_haml
+- Some tweaks to templates based on linter
+- Convert all haml templates in channel types
+
+## v8.3.0 (2023-08-09)
+
+- Drop no longer used Org.brand field
+- Add messagebird channel type
+
+## v8.2.0 (2023-08-07)
+
+- Update stable versions
+
+## v8.1.245 (2023-08-05)
+
+- Truncate query labels on flow start
+- Fix line length formatting
+- Fixes for login and API titles
+
+## v8.1.244 (2023-08-04)
+
+- Fix error handling for temba-contact-search
+
+## v8.1.243 (2023-08-03)
+
+- Fix DELETE endpoints in API explorer
+- Bump cryptography from 41.0.2 to 41.0.3
+
+## v8.1.242 (2023-08-02)
+
+- Update to components with modax serialize fix
+
+## v8.1.241 (2023-08-02)
+
+- Fix two factor disable and initial QR code rendering
+
+## v8.1.240 (2023-08-01)
+
+- Update components with checkbox value update
+- Stop writing no longer used Org.brand
+
+## v8.1.239 (2023-08-01)
+
+- Temp fix for org export page by replacing temba-checkbox with regular inputs
+- Cleanup msg_console
+
+## v8.1.238 (2023-07-28)
+
+- Fix flow start log when starts don't have exclusions
+- Remove unnecessary CSS class to hover
+
+## v8.1.237 (2023-07-28)
+
+- Only consider the parsed query string in contact_search clean
+- Add show CSS class to icon for contact list sorting
+
+## v8.1.236 (2023-07-27)
+
+- Rename flow_broadcast to flow_start
+- Update editor to fix cases on result split
+- Add new channel log types used by courier
+- Update contact search widget for flow starts
+
+## v8.1.235 (2023-07-26)
+
+- Convert templates in dashboard, docs, globals, ivr, locations and notifications apps
+- Use title-text for just overriding the text
+- Restore missing msg box templates
+
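+The random_string entry above (v8.3.13) swaps Python's seedable `random` module for the `secrets` CSPRNG. A minimal sketch of such a helper, assuming the name and alphabet rather than quoting the actual temba utility:
+
+```python
+import secrets
+import string
+
+
+def random_string(length: int) -> str:
+    """Return a random alphanumeric string from a cryptographically secure source."""
+    # secrets.choice draws from the OS entropy pool, unlike random.choice,
+    # which is deterministic once seeded and unsuitable for tokens.
+    alphabet = string.ascii_letters + string.digits
+    return "".join(secrets.choice(alphabet) for _ in range(length))
+```
+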
+## v8.1.234 (2023-07-25)
+
+- Fix org export page
+- Fix permissions for viewer for flow results
+
+## v8.1.233 (2023-07-25)
+
+- Simplify convert_templates script
+- Consistent title for initial page load
+- Remove spa-title and spa-style
+- Add archives to STORAGES
+
+## v8.1.232 (2023-07-24)
+
+- Do not set the max for y axis chart to allow that to be calculated
+- Convert templates in the triggers app from haml
+
+## v8.1.231 (2023-07-21)
+
+- Simplify redis settings and organize settings better in sections
+
+## v8.1.230 (2023-07-20)
+
+- Tweak system check for storage settings to check different storages are configured
+- Convert S3 log access to be via django storages
+- Use pg_dump/restore from docker container in mailroom_db command so it's always correct version
+
+## v8.1.229 (2023-07-19)
+
+- Fix tickets list to show compose properly on Firefox
+- Add cpAddress parameter as optional for MTN channel type
+
+## v8.1.228 (2023-07-18)
+
+- Update broken Instagram docs link
+- Allow initiating flow results download from the flow labels filter view
+
+## v8.1.227 (2023-07-17)
+
+- Bump cryptography from 41.0.0 to 41.0.2
+
+## v8.1.226 (2023-07-13)
+
+- Rework trimming cron tasks to use delete_in_batches
+- Drop no longer used Binary Optional Data field
+
+## v8.1.225 (2023-07-13)
+
+- Fix icon for globals delete
+- Migrate old Twilio channels using .bod to use .config instead
+- Remove duplicate menu views in classifiers and channels apps
+
+## v8.1.224 (2023-07-12)
+
+- Add log_policy to channel
+
+## v8.1.223 (2023-07-11)
+
+- More tweaks to org deletion
+
+## v8.1.222 (2023-07-11)
+
+- Add delete_in_batches util function to improve org deletion
+- Actually fix deletion of campaign events during org deletion
+
+## v8.1.221 (2023-07-11)
+
+- Fix deleting of campaign events and add more logging to org deletion
+
+## v8.1.220 (2023-07-10)
+
+- Delete is only for deleting child workspaces
+
+## v8.1.219 (2023-07-10)
+
+- Fix problems with org deletion
+
+## v8.1.218 (2023-07-07)
+
+- Update to flow editor with fix for ward cases
+
+## v8.1.217 (2023-07-06)
+
+- Convert haml files in contacts app
+- Bump django from 4.2.2 to 4.2.3
+
+## v8.1.216 (2023-07-05)
+
+- Add data migration to fix archived message counts for labels
+- Convert haml templates in campaigns and classifiers apps
+
+## v8.1.215 (2023-07-05)
+
+- Add missing migration that rebuilds constraint on contact URNs
+- Update channel log retention to 2 weeks
+- Disable old 360 Dialog channel type, and take the new integration out of beta
+
+## v8.1.214 (2023-07-03)
+
+- Update to psycopg3 non-binary
+- Reference templates as html
+
+## v8.1.213 (2023-07-03)
+
+- Convert flows app to be hamless
+
+## v8.1.212 (2023-07-03)
+
+- Sorted group list when editing contacts
+- Switch channel charts to load with json instead of embedded data
+
+## v8.1.211 (2023-06-28)
+
+- Fix Twilio channel update modal
+
+## v8.1.210 (2023-06-28)
+
+- Fix mangling of option attributes
+- Save channel logs with channels/ prefix
+- Add configurable agent access per contact field
+
+## v8.1.209 (2023-06-28)
+
+- Fix creating PublicFileStorage
+
+## v8.1.208 (2023-06-28)
+
+- Fix S3 channel logs paths to not start with slash
+- Update to Django 4.2
+
+## v8.1.207 (2023-06-27)
+
+- Convert some haml templates to html
+
+## v8.1.206 (2023-06-27)
+
+- Drop duplicate index
+- Look for channel logs in S3 when not found in database
+- Move tracking label counts to statement level triggers
+
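+The delete_in_batches entries above (v8.1.222, v8.1.226) describe chunked deletion so that org deletion and the trim crons never hold one huge transaction. A sketch of what such a utility can look like, assuming the name from the changelog but not the exact temba signature:
+
+```python
+def delete_in_batches(qs, *, batch_size: int = 1000) -> int:
+    """Delete a queryset in primary-key batches, returning total rows deleted."""
+    total = 0
+    while True:
+        # evaluate a small slice of ids so each DELETE statement stays short
+        pks = list(qs.values_list("pk", flat=True)[:batch_size])
+        if not pks:
+            return total
+        total += qs.model.objects.filter(pk__in=pks).delete()[0]
+```
+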
+## v8.1.205 (2023-06-27)
+
+- Replace index on channellog.channel
+
+## v8.1.204 (2023-06-26)
+
+- Fix inline group created and broadcast action
+
+## v8.1.203 (2023-06-26)
+
+- Update contact action fix
+
+## v8.1.202 (2023-06-26)
+
+- Rework settings for S3 buckets
+
+## v8.1.201 (2023-06-23)
+
+- Support runtime locales in components
+
+## v8.1.200 (2023-06-23)
+
+- Update for flow editor text inputs with null values
+
+## v8.1.199 (2023-06-22)
+
+- Updates for select widget to behave with more standard form controls
+
+## v8.1.198 (2023-06-22)
+
+- Rollback components
+
+## v8.1.197 (2023-06-22)
+
+- Override the correct alpha3 code for Oromifa
+- Update form components to use element internals
+- Rework loading of channel logs so they're easier to fetch from S3 too
+
+## v8.1.196 (2023-06-21)
+
+- Improve ExternalURLField and don't assume http
+- Use org import task to import flows
+
+## v8.1.195 (2023-06-19)
+
+- Name override for oro language
+- Remove no longer used code relating to contact fields
+
+## v8.1.194 (2023-06-19)
+
+- Don't ignore user provided role for somleng shortcodes
+- Fix flow export button height
+- Fix import translation to use new UI
+- Fix parent ID lookup in import geojson
+- Support Dialog360 Cloud API channels
+
+## v8.1.193 (2023-06-14)
+
+- Add surveyor icon
+
+## v8.1.192 (2023-06-14)
+
+- Add icons for flows, fix issue with some spload fires
+
+## v8.1.191 (2023-06-13)
+
+- Broadcast update via wizard and updated list styling
+
+## v8.1.190 (2023-06-12)
+
+- Add agent_access to API fields endpoint
+- Restrict agent users' view of field values on API contacts endpoint
+- Remove use of django tags inside javascript
+
+## v8.1.189 (2023-06-12)
+
+- Fix broken list view template
+- Add djlint and latest django-hamlpy
+
+## v8.1.188 (2023-06-09)
+
+- Tweak contact field access backfill migration
+
+## v8.1.187 (2023-06-09)
+
+- Add ContactField.agent_access and backfill to view
+- Use statement level triggers for tracking current node counts
+- Remove old scheduled broadcast create view
+
+## v8.1.186 (2023-06-08)
+
+- Format api_root.html and fix errors
+- Fix channel log pretty printing
+
+## v8.1.183 (2023-06-08)
+
+- Add djLint config
+- Add basic wizard support
+
+## v8.1.182 (2023-06-08)
+
+- Support imports with Status column
+- Make viewer role users a feature that can be toggled
+- Allow exporting of blocked, stopped and archived contacts
+
+## v8.1.181 (2023-06-07)
+
+- Add redact_values for FBA and IG channel types
+- Remove unused code for legacy UI contact read and list pages
+- Rework channel log anonymization so even staff users have to explicitly break out of it
+- Rework channel log rendering to start from JSONified version
+- Fix adding queued broadcasts to Outbox view and counts
+- Cleanup db triggers for broadcasts
+
+## v8.1.180 (2023-06-05)
+
+- Fix failed message resending and archived message deletion
+
+## v8.1.179 (2023-06-05)
+
+- Drop ChannelLog.msg and .call
+
+## v8.1.178 (2023-06-05)
+
+- Bump cryptography from 39.0.2 to 41.0.0
+- Stop reading from ChannelLog.msg and .call
+- Use per-statement db triggers for system label counts
+
+## v8.1.177 (2023-06-02)
+
+- Remove dupe from changelog
+
+## v8.1.176 (2023-06-02)
+
+- Add some blocks on main templates
+
+## v8.1.175 (2023-06-02)
+
+- Add select all on list pages
+
+## v8.1.174 (2023-06-01)
+
+- Noop when releasing an already released org
+- Rework and simplify channel count db triggers
+
+## v8.1.173 (2023-06-01)
+
+- Remove support for filtering channel logs by folder
+
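+Several entries above (v8.1.187, v8.1.178, v8.1.174) move count maintenance from per-row to statement-level database triggers, which fire once per statement and can aggregate every affected row through a transition table. A hedged sketch of the pattern, with hypothetical table and column names rather than the actual RapidPro schema, and with any squashing metadata the real count tables carry omitted; requires PostgreSQL 10+ (the `EXECUTE FUNCTION` spelling needs 11+):
+
+```python
+from django.db import migrations
+
+# One trigger invocation per statement; "newtab" exposes every row the
+# statement inserted, so counts are written as a single aggregated insert.
+SQL = """
+CREATE OR REPLACE FUNCTION temba_labelcount_on_insert() RETURNS trigger AS $$
+BEGIN
+    INSERT INTO msgs_labelcount(label_id, count)
+    SELECT label_id, count(*) FROM newtab GROUP BY label_id;
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE TRIGGER temba_labelcount_on_insert
+AFTER INSERT ON msgs_msg_labels
+REFERENCING NEW TABLE AS newtab
+FOR EACH STATEMENT EXECUTE FUNCTION temba_labelcount_on_insert();
+"""
+
+
+class Migration(migrations.Migration):
+    dependencies = [("msgs", "0001_initial")]  # hypothetical dependency
+    operations = [migrations.RunSQL(SQL)]
+```
+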
+## v8.1.171 (2023-05-31)
+
+- Add index on channellog.uuid
+- Improve and expose the call list view
+
+## v8.1.170 (2023-05-31)
+
+- Remove rendering of contact history as template now that new UI only consumes it as JSON
+- Fix inbox msg type for Android channels
+
+## v8.1.169 (2023-05-30)
+
+- Allow call count backfill migration to be called offline
+- Fix ivr call trigger migration
+- Remove unused stuff from inbox views
+
+## v8.1.168 (2023-05-30)
+
+- Add data migration to backfill ivr call counts
+
+## v8.1.167 (2023-05-29)
+
+- Add DB triggers to track counts of calls as a new system label
+
+## v8.1.166 (2023-05-29)
+
+- Stop writing SystemLabelCount.is_archived so it can be dropped
+
+## v8.1.165 (2023-05-29)
+
+- Always write system label counts with is_archived=False and make field nullable
+
+## v8.1.164 (2023-05-29)
+
+- Add data migration to delete old system label counts for is_archived=true because they're no longer updated
+- Fix getting FB business ID for WAC channels
+
+## v8.1.163 (2023-05-25)
+
+- Return empty sample/fields on preview_start endpoint until contactsearch component is updated
+
+## v8.1.162 (2023-05-25)
+
+- Add BroadcastCRUDL.Preview
+- Fix broadcast send history template
+
+## v8.1.161 (2023-05-24)
+
+- User orgs based on request
+- Switch brand array to dict
+- Move plivo connect view to channel type
+
+## v8.1.160 (2023-05-19)
+
+- Fix field update and deleting with same key
+
+## v8.1.159 (2023-05-19)
+
+- Don't allow horizontal scroll by default
+
+## v8.1.158 (2023-05-19)
+
+- Fix scrolling for content pages without full height
+- Tweak how we run python scripts in CI
+
+## v8.1.157 (2023-05-18)
+
+- Add ticket editing
+- Remove old ticket assign view and support for notes with assignment
+- Add ticket topic menu and resizer
+- Move WAC connect view to the WhatsApp cloud channel type package
+- Remove accounts formax from workspace view as it isn't needed with new UI
+
+## v8.1.156 (2023-05-17)
+
+- Update components for 302 fix
+- Make post_url work identically to posterize
+
+## v8.1.155 (2023-05-17)
+
+- Better handling of post_url for spa content menu
+- Really fix hiding surveyor form
+
+## v8.1.154 (2023-05-17)
+
+- Hide the surveyor password input and not just the help text
+- Fix URLs in JS files
+
+## v8.1.153 (2023-05-17)
+
+- Move channel type constants to the channel type class
+- Don't show option to enter surveyor password if surveyor feature not enabled
+- Scoped javascript for flow broadcast modal
+
+## v8.1.152 (2023-05-15)
+
+- Make js function name unique
+- Fix no_nav extra-script blocks
+
+## v8.1.151 (2023-05-15)
+
+- Fix the API explorer scripts and styles blocks
+
+## v8.1.150 (2023-05-15)
+
+- Cleanup broken or unused posterized links
+- Drop old flow start fields
+
+## v8.1.149 (2023-05-14)
+
+- Fix signups
+
+## v8.1.148 (2023-05-12)
+
+- Fix backwards compat for send message to somebody else
+
+## v8.1.147 (2023-05-12)
+
+- Fix flow refresh and global redirect hook
+
+## v8.1.146 (2023-05-12)
+
+- Add some null checks for frame selectors
+
+## v8.1.145 (2023-05-11)
+
+- Fix width for other views and posterize on choose
+
+## v8.1.144 (2023-05-11)
+
+- Fix login width
+- Tweak Somleng claim blurb
+
+## v8.1.143 (2023-05-11)
+
+- Stop reading from old FlowStart fields
+- Merge and clean up main frame
+- Rename Twiml API channel to Somleng
+
+## v8.1.142 (2023-05-11)
+
+- Add base mixin for channel type specific views that gives access to the type class
+- Update components and editor to support compose for somebody else
+- Move vonage connect view to the channel type +- Allow deleting of archived triggers + +## v8.1.141 (2023-05-10) + +- Fix contacts title +- Fix vanilla landing +- Remove lessblock and replace with compiled css +- Bump django from 4.1.7 to 4.1.9 + +## v8.1.140 (2023-05-09) + +- Fix ticket padding +- Remove remaining spa files +- Add link to reset the latest credentials +- Preset channel connection + +## v8.1.139 (2023-05-09) + +- Add blocked icon + +## v8.1.138 (2023-05-09) + +- Update labeling to use temba-checkbox and remove jQuery +- Fix trim_channel_logs config and rework so task only runs for an hour max +- Change test_db to create single org at a time + +## v8.1.137 (2023-05-09) + +- Add exclusions and params fields to FlowStart and start writing them + +## v8.1.136 (2023-05-09) + +- Don't include brand variables in less node + +## v8.1.135 (2023-05-09) + +- Remove references to old icon set +- Remove unused jquery bits and intercooler +- Remove bootstrap + +## v8.1.134 (2023-05-08) + +- Remove no longer used perms +- Remove any old non-spa templates not being extended by the spa version +- Remove is_spa logic from templates +- Remove old contact update fields views + +## v8.1.133 (2023-05-05) + +- Add default color + +## v8.1.132 (2023-05-05) + +- Remove settings turd + +## v8.1.131 (2023-05-05) + +- Remove old nav from landing page + +## v8.1.130 (2023-05-04) + +- Remove spa checking in views + +## v8.1.129 (2023-05-04) + +- Remove JSON view to list notifications now that it has moved to the internal API +- Remove non-spa items from content menus + +## v8.1.128 (2023-05-03) + +- Fix contact import + +## v8.1.127 (2023-05-03) + +- Remove support for adding bulk sender delegate channels +- Remove ability to create IVR delegates for android channels +- Remove org home view altogether and update links to point to workspace view + +## v8.1.126 (2023-05-03) + +- Change cookie checking for UI so that we always default to new UI +- Add color picker widget +- Remove ability to store twilio credentials on the org + +## v8.1.125 (2023-05-02) + +- Tweak notifications index to match API endpoint +- Add new internal API with a notifications endpoint +- Use DRF defaults for STRICT_JSON and UNICODE_JSON +- Remove unused .api URL suffixes + +## v8.1.124 (2023-05-01) + +- Make contact.modify work with new and old format +- Make ticket a reserved field name + +## v8.1.123 (2023-04-27) + +- Hide Open Ticket option on contact read page if there's already an open ticket +- Rework soft and hard msg deleting to be more performant + +## v8.1.122 (2023-04-26) + +- Remove db constraints on Msg.flow and Msg.ticket + +## v8.1.121 (2023-04-26) + +- Tweak migration dependency +- Show counts of tickets by topic on tickets menu + +## v8.1.120 (2023-04-25) + +- Add topic counts to the API endpoint +- Add undocumented param to contacts API endpoint which allows URNs to be expanded +- Data migration to backfill ticket counts by topic + +## v8.1.119 (2023-04-25) + +- Start writing ticket counts for topics + +## v8.1.118 (2023-04-24) + +- Fix deleting of flows and tickets which are referenced by messages +- Fix pattern match for folder uuid +- Stop writing TicketCount.assignee + +## v8.1.117 (2023-04-24) + +- Stop reading from TicketCount.assignee + +## v8.1.116 (2023-04-21) + +- Add more channel icons + +## v8.1.115 (2023-04-21) + +- Update icons +- Add ticket topic folders + +## v8.1.114 (2023-04-20) + +- Add migration to backfill TicketCount.scope + +## v8.1.113 (2023-04-20) + +- Add scope field to TicketCount and
start writing + +## v8.1.112 (2023-04-20) + +- Dropdowns for slow clickers +- Tighten up animations +- Use services for redis, elastic and postgres in CI + +## v8.1.111 (2023-04-18) + +- Fix and archive keyword triggers with no match_type + +## v8.1.110 (2023-04-18) + +- Prefetch flows on message views and make titles consistent + +## v8.1.109 (2023-04-18) + +- Add links for menu, add flow badge, update label badges +- Remove Chikka channel type which no longer exists +- Update mailroom_db command to allow connecting to non-file socket postgres + +## v8.1.108 (2023-04-17) + +- Add ticket field to msg model + +## v8.1.107 (2023-04-13) + +- Allow deleting of groups used in triggers + +## v8.1.106 (2023-04-13) + +- Don't show topics on tickets until clicked + +## v8.1.105 (2023-04-12) + +- Fix js items on context menus + +## v8.1.104 (2023-04-11) + +- Do not display schedule events for archived triggers +- Don't require db superuser for test_db command +- Make ticket banner expandable + +## v8.1.103 (2023-04-10) + +- Fix urls when searching and paging +- Follow message on auto assign for unassigned folder + +## v8.1.102 (2023-04-10) + +- Add contact details pane, hide empty tabs +- Auto assign tickets when sending messages +- Add nicer ticket assignment using temba-contact-tickets component +- Fix deleting of orgs with incidents + +## v8.1.101 (2023-04-06) + +- Add field search handler on tickets + +## v8.1.100 (2023-04-06) + +- Add fields to tickets + +## v8.1.99 (2023-04-06) + +- Add test util to make it easier to mess with brands +- Drop Org.stripe_customer_id + +## v8.1.98 (2023-04-06) + +- Link contact name on tickets to the contact page if permitted +- Drop Org.plan, plan_start and plan_end + +## v8.1.97 (2023-04-05) + +- Pull tickets out of contact chat +- Scheduled messages to broadcasts with compose widget + +## v8.1.96 (2023-04-03) + +- Stop reading Org.plan and .plan_end +- Bump redis from 4.5.3 to 4.5.4 + +## v8.1.95 (2023-03-31) + +- Fix temba-store race on load + +## v8.1.94 (2023-03-29) + +- Bump version of openpyxl + +## v8.1.93 (2023-03-29) + +- Update Excel reading dependencies + +## v8.1.92 (2023-03-29) + +- Use unittest.mock.Mock in tests instead of custom mock_object + +## v8.1.91 (2023-03-28) + +- Upgrade redis library version + +## v8.1.90 (2023-03-27) + +- NOOP instead of assert if archiving msg which is already archived etc + +## v8.1.89 (2023-03-27) + +- Do not fail to release channel when missing mtn subscription id in config +- Add incident type for org suspension + +## v8.1.88 (2023-03-23) + +- Fix suspending and unsuspending orgs so that it correctly updates children +- Use a name for the active org that doesn't collide + +## v8.1.87 (2023-03-23) + +- Manually fix version number + +## v8.1.86 (2023-03-23) + +- Fix scrolling on WhatsApp templates page + +## v8.1.85 (2023-03-23) + +- Handle short screens better on run list page + +## v8.1.84 (2023-03-22) + +- Update to coverage 7.x + +## v8.1.83 (2023-03-22) + +- Use onSpload to wire handlers on account form + +## v8.1.82 (2023-03-22) + +- Support setting and removing the subscription URL for MTN channels + +## v8.1.81 (2023-03-21) + +- Update ruff and isort + +## v8.1.80 (2023-03-21) + +- Update black + +## v8.1.79 (2023-03-20) + +- Add mouseover text for temba-date +- Reload page on org mismatch +- Use embedded title instead of response header + +## v8.1.78 (2023-03-20) + +- Add globals to new ui +- Make it harder to accidentally delete an org +- Rewrite org deletion test and fix deletion issues + +## v8.1.77
(2023-03-16) + +- Limit groups to a single line on contact page + +## v8.1.76 (2023-03-16) + +- Remove unused fields and indexes on broadcast model +- Reload page on version mismatch +- Add support for MTN Developer Portal channel + +## v8.1.75 (2023-03-16) + +- Add menu path for org export and import +- Fix legacy goto function for old UI +- Warn users who go back to the old interface +- Remove support for broadcasts with associated tickets + +## v8.1.74 (2023-03-15) + +- Show version number on public index page +- Add poetry plugin to maintain version number in `temba/__init__.py` +- Fix textinput inner scrolling + +## v8.1.73 (2023-03-15) + +- Stop returning type=flow|inbox on messages endpoint +- Cleanup location app models + +## v8.1.72 (2023-03-14) + +- Convert Org.config and Channel.config to be real JSON + +## v8.1.71 (2023-03-14) + +- Strip out invalid HTTP header characters from page title response headers +- Fix mailroom db command to patch uuid generation after migrations are run +- Expose flow on messages API endpoint + +## v8.1.70 (2023-03-13) + +- Broad support for meta click for new tabs +- Make Org.config and Channel.config non-null + +## v8.1.69 (2023-03-13) + +- Simplify use of config fields on channel update forms +- Fix alias editor to use the new UI frame +- Support updating Twilio credentials for T, TMS and TWA channels + +## v8.1.68 (2023-03-13) + +- Rework messages and broadcasts API endpoints to accept media object UUIDs as attachments +- Make Msg.uuid and msg_type non-null + +## v8.1.67 (2023-03-10) + +- Fix layering for menu + +## v8.1.66 (2023-03-09) + +- Fix initial editor load +- Schedule message validation + +## v8.1.65 (2023-03-09) + +- Update endpoints for messages and media + +## v8.1.64 (2023-03-08) + +- Tweak layout for editor +- Cleanup fail_old_messages task. Use correct statuses and return number failed.
+ +## v8.1.63 (2023-03-08) + +- Adjust export download page for new UI +- Make media list page (still staff only) filter by org and add index + +## v8.1.62 (2023-03-08) + +- Small z-index tweak + +## v8.1.61 (2023-03-07) + +- Tweak simulator placement in new ui + +## v8.1.60 (2023-03-07) + +- Encourage users to try the new interface +- Add lightbox for contact history + +## v8.1.59 (2023-03-07) + +- Rework code depending on msg_type=I|F + +## v8.1.58 (2023-03-07) + +- Add missing channels migration +- Use msg.created_by if set in ticket list view +- Remove SMS type channel alerts + +## v8.1.57 (2023-03-06) + +- Move index on msg.external_id onto the model + +## v8.1.56 (2023-03-06) + +- Fix soft deleting of scheduled messages so schedule is deleted too +- Stop saving JSONAsTextField values as null for empty dicts and lists +- Update select s3 usage for msg exports to not rely on type=inbox|flow +- Add created_by to Msg and populate on events in contact histories + +## v8.1.55 (2023-03-02) + +- Fix import for sync fcm task +- Create new filters and partial indexes for Inbox, Flows and Archived + +## v8.1.54 (2023-03-02) + +- Fix enter on compose + +## v8.1.53 (2023-03-01) + +- Add compose component to contact chat +- Pixel tweak on contact read page +- Move more Android relayer code out of Channel + +## v8.1.52 (2023-03-01) + +- Simplify what we display for Android channels on read page + +## v8.1.50 (2023-02-28) + +- Make spload universal + +## v8.1.49 (2023-02-28) + +- Make spload work on formax pages + +## v8.1.48 (2023-02-28) + +- Add more goto(event) +- Fix content differing from page-load vs inline load +- Add page title for spa response headers +- Clean up subtitles on spa pages +- Add link to flow starts (and clean up list page styling) +- Add link for webhook calls (and cleanup styling here too) +- Update styling for log pages for both old / new ui + +## v8.1.47 (2023-02-27) + +- Be less clever with page titles. Fix label js errors. 
+- Make sure tests can run without making requests to external URLs +- Unpublicize folder=incoming on messages API docs and re-add index with status=H + +## v8.1.46 (2023-02-23) + +- Fix external links in old ui + +## v8.1.45 (2023-02-23) + +- Fix external channel links +- No longer intercept clicks in spa-content +- Cleanup Channel model fields +- Fix channel claim external URLs in new UI + +## v8.1.44 (2023-02-23) + +- Exclude PENDING messages in contact history and API by org and contact +- Add -id to msg fetch ordering in Contact.get_history +- For both messages and tickets, replace the default indexes on org and contact with indexes that match the API ordering + +## v8.1.43 (2023-02-23) + +- Use statement level db trigger for broadcast msg counts +- Update django to 4.1.7 + +## v8.1.42 (2023-02-22) + +- Only look at queued messages when syncing android channels +- Re-add Msg.STATUS_INITIALIZING to use for outgoing messages which fail to queue +- Include STATUS_ERRORED messages in Outbox views + +## v8.1.41 (2023-02-22) + +- Remove spurious property + +## v8.1.40 (2023-02-22) + +- Fix contact imports in new ui +- Fix menu refresh race +- Remove window.lastFetch +- Adjust menu paths for new UI channel views +- Use SpaMixin on more channel extra views + +## v8.1.39 (2023-02-22) + +- Move Msg.update into android package +- Make text optional on broadcasts endpoint (messages need text or attachments) + +## v8.1.38 (2023-02-21) + +- Fix dashboard not loading when content +- Fix handling FCM sync failure + +## v8.1.37 (2023-02-21) + +- Don't lookup related fields in API if lookup value type is wrong +- Update django to 4.0.10 +- Fetching sent folder on messages endpoint should return messages ordered by -sent_on same as UI +- Exclude unhandled messages from Incoming folder on messages API endpoint +- More aggressive menu refreshing +- Move much of the old android relayer code into its own package +- Add media API endpoint, undocumented for now +- Open up new UI access to everyone + +## v8.1.36 (2023-02-20) + +- Cleanup use of validators in the API +- Add support for Msg.TYPE_TEXT to be used (for now) for outgoing messages + +## v8.1.35 (2023-02-17) + +- Add org start redirection view +- Convert Attachment to be a dataclass +- Rework msg write serializer to create a transient Msg instance that the read serializer can use without hitting the db +- Add unpublicized API endpoint to send a single message +- Add msg_send to mailroom client + +## v8.1.34 (2023-02-16) + +- Drop raw_urns field on Broadcast +- Pass group id instead of uuid to contact_search mailroom endpoint +- Remove unused expression_migrate from mailroom client + +## v8.1.33 (2023-02-15) + +- Fix routing of current workspace to settings +- Add Broadcast.urns which matches the JSON and FlowStart.urns + +## v8.1.32 (2023-02-14) + +- Drop Broadcast.urns and .send_all + +## v8.1.30 (2023-02-13) + +- Fix keyword triggers match type + +## v8.1.29 (2023-02-13) + +- Fix omnibox search for anon org to allow search by contact name +- Prepare to drop Broadcast.send_all and .urns + +## v8.1.27 (2023-02-10) + +- Move all form text from Trigger model to forms +- Add migration to convert URNs to contacts on scheduled broadcasts + +## v8.1.26 (2023-02-10) + +- Remove returning specific URNs from omniboxes and instead match contacts by URN +- Rework spa menu to eliminate mapping + +## v8.1.25 (2023-02-09) + +- Remove support for unused v1 omnibox format +- Update broadcasts API endpoint to support attachments + +## v8.1.24 (2023-02-08) + +- Update to
latest cryptography library +- Add task to interrupt flow sessions after 90 days + +## v8.1.23 (2023-02-06) + +- Fix flow results redirecting to its own page +- Make sure WA numbers can only be claimed once + +## v8.1.22 (2023-02-06) + +- Update to latest django to get security fix + +## v8.1.21 (2023-02-06) + +- Fix export > import path on new ui +- Fix login redirects from pjax calls + +## v8.1.20 (2023-02-02) + +- Add servicing menu on org read + +## v8.1.19 (2023-02-01) + +- Add Msg.quick_replies +- Add Broadcast.query +- More generic servicing for staff users + +## v8.1.18 (2023-02-01) + +- Drop unused Media.name field + +## v8.1.17 (2023-01-31) + +- Fix modax from menu bug + +## v8.1.15 (2023-01-30) + +- Add new org chooser with avatars in new UI +- Add dashboard to menu in new UI + +## v8.1.14 (2023-01-27) + +- Add ordering support for filters +- Fix redirect ping pong when managing orgs +- Tweak inspect_flows command to report spec version mismatches + +## v8.1.13 (2023-01-26) + +- Update flow editor + +## v8.1.12 (2023-01-26) + +- Add locale field to Msg + +## v8.1.11 (2023-01-25) + +- Add migration to alter flow language field to first update any remaining flows with 'base' + +## v8.1.10 (2023-01-25) + +- Require flow and broadcast base languages to be 3 letters +- Require broadcast.translations to be non-null + +## v8.1.9 (2023-01-25) + +- Drop unused broadcast fields + +## v8.1.8 (2023-01-24) + +- Make Broadcast.text nullable and stop writing it + +## v8.1.7 (2023-01-24) + +- Stop reading from Broadcast.text + +## v8.1.6 (2023-01-23) + +- Fix campaign imports so we don't import base as a language +- Increase max-width for channel configuration page +- Support bandwidth channel type + +## v8.1.5 (2023-01-23) + +- Data migration to backfill broadcast.translations and replace base with und + +## v8.1.4 (2023-01-20) + +- Update campaign message events with language base +- Make servicing use posterize + +## v8.1.3 (2023-01-19) + +- Tweak broadcasts API endpoint so it filters by is_active and hits index +- Fix indexes used for tickets API endpoint +- Remove unused indexes on contacts_contact +- Bump engine version to 13.2 + +## v8.1.2 (2023-01-19) + +- Fixes for content menu changes +- Fix test_db to create orgs with flow languages + +## v8.1.1 (2023-01-18) + +- Restrict creating surveyor flows unless that is enabled as a feature +- Always create broadcasts with status = QUEUED, create index for fetching queued broadcasts +- Add new translations JSON field to broadcasts and start writing it +- Remove support for creating broadcasts with legacy expressions +- New content menu component + +## v8.1.0 (2023-01-17) + +- Update contact import styling +- Implement squashed migrations +- Stop trimming flow starts as this will be handled by archiver + +## v8.0.1 (2023-01-12) + +- Tweak migration dependencies to ensure clean installs run them in an order that works +- Add empty migrations required for squashing + +## v8.0.0 (2023-01-10) + +- Update deps + +## v7.5.149 (2023-01-10) + +- Drop FlowRunCount model + +## v7.5.148 (2023-01-09) + +- Stop squashing FlowRunCount +- Add missing index on FlowRunStatusCount and rework get_category_counts to be deterministic +- Stop creating flows_flowruncount rows in db triggers and remove unsquashed index +- Bump required pg_dump version for mailroom_db command to 14 + +## v7.5.147 (2023-01-09) + +- Use und (Undetermined) as default flow language and add support for mul (Multiple) +- Disallow empty and null flow languages, change default spec version to
zero +- Tweak migrate_flows to have smaller batch size and order by org to increase org assets cache hits + +## v7.5.146 (2023-01-05) + +- Cleanup migrate_flows command and stop excluding flows with version 11.12 +- Change sample flows language to eng +- Refresh menu when tickets are updated +- Fix frame-top analytics includes +- Fix transparency issue with content menu on editor page + +## v7.5.145 (2023-01-04) + +- Update flow editor to include fix for no expiration route on ivr +- Stop defaulting to base for new flow languages + +## v7.5.144 (2023-01-04) + +- Ensure all orgs have at least one flow language +- Switch to using temba-date in more places + +## v7.5.143 (2023-01-02) + +- Update mailroom version for CI +- Tidy up org creation (signups and grants) + +## v7.5.142 (2022-12-16) + +- Fix org listing when org has no users left + +## v7.5.141 (2022-12-16) + +- Fix searching for orgs on manage list page +- Fix highcharts colors +- Fix invalid template name + +## v7.5.140 (2022-12-15) + +- Fix flow results page + +## v7.5.136 (2022-12-15) + +- Tell codecov to ignore static/ +- Switch label action buttons to use temba-dropdown + +## v7.5.135 (2022-12-13) + +- Fix content menu display issues + +## v7.5.134 (2022-12-13) + +- Switch to yarn + +## v7.5.133 (2022-12-12) + +- Bump required python version to 3.10 + +## v7.5.132 (2022-12-12) + +- Support Python 3.10 + +## v7.5.131 (2022-12-09) + +- Replace .gauge on analytics backend with .gauges which allows backends to send gauge values in bulk +- Remove celery auto discovery for jiochat and wechat tasks which were removed + +## v7.5.130 (2022-12-09) + +- Record cron time in analytics + +## v7.5.129 (2022-12-08) + +- Cleanup cron task names +- Split task to trim starts and sessions into two separate tasks +- Expose all status counts on flows endpoint +- Read from FlowRunStatusCount instead of FlowRunCount +- Track flow start counts with statement level rather than row level triggers + +## v7.5.128 (2022-12-07) + +- Record cron task last stats in redis +- Switch from flake8 to ruff +- Add data migration to convert exit_type counts to status counts + +## v7.5.127 (2022-12-07) + +- Fix counts for triggers on the menu + +## v7.5.126 (2022-12-06) + +- Add new count model for run statuses managed by statement-level db triggers + +## v7.5.125 (2022-12-05) + +- Tweak index used to find messages to retry so that it includes PENDING messages + +## v7.5.124 (2022-12-05) + +- Update to latest components +- More updates for manage pages + +## v7.5.123 (2022-12-02) + +- Fix bulk labelling flows + +## v7.5.122 (2022-12-02) + +- Add user read page +- Latest components +- Rework notification and incident types to function more like other typed things +- Add org timezone to manage page +- Remove no longer used group list view +- Log celery task completion by default and rework some tasks to return results included in the logging +- Refresh browser on field deletion in legacy +- Show org plan end as relative time +- Don't show location field types as options on deploys where locations aren't enabled + +## v7.5.121 (2022-11-30) + +- Fix loading of notification types + +## v7.5.120 (2022-11-30) + +- Rework notification types to work more like channel types +- Update API fields endpoint to use name and type for writes as well as reads +- Remove unused field on campaign events write serializer +- Change undocumented pinned field on fields endpoint to be featured +- Add usages field to fields API endpoint, as well as name and type to replace label and value_type +- Add Line
error reference URL + +## v7.5.119 (2022-11-29) + +- Fix flow label in list buttons +- Fix editor StartSessionForm bug for definitions without exclusions +- Remove no longer needed check for plan=parent + +## v7.5.118 (2022-11-28) + +- Add telegram and viber error reference URLs +- Make Org.plan optional +- Add support to create new workspaces from org chooser + +## v7.5.117 (2022-11-23) + +- Update to latest editor +- Drop Org.is_multi_org and Org.is_multi_user which have been replaced by Org.features + +## v7.5.116 (2022-11-23) + +- Fix flow label name display + +## v7.5.115 (2022-11-22) + +- Default to no features on new child orgs +- Add features field to org update UI + +## v7.5.114 (2022-11-22) + +- Add Org.features and start writing it +- Add error ref url for FBA and IG +- Update temba-components to get new link icon +- Cleanup msg status constants +- Always create new orgs with default plan and only show org_plan for non-child orgs + +## v7.5.113 + +- Stop reading Label.label_type and make nullable +- Remove all support for labels with parents + +## v7.5.112 + +- Remove OrgActivity + +## v7.5.111 + +- Delete associated exports when trying to delete message label folders + +## v7.5.110 + +- Data migration to flatten msg labels + +## v7.5.109 + +- Remove logic for which plan to use for a new org + +## v7.5.108 + +- Tweak how get_new_org_plan is called +- Move isort config to pyproject +- Remove no longer used workspace plan + +## v7.5.107 + +- Treat parent and workspace plans as equivalent + +## v7.5.106 + +- Tweak flow label flatten migration to not allow new names to exceed 64 chars + +## v7.5.105 + +- Display channel logs with earliest at top + +## v7.5.104 + +- Remove customized 500 handler +- Remove sentry support +- Data migration to flatten flow labels +- Fix choice of brand for new orgs and move plan selection to classmethod +- Catch corrupted CSV errors + +## v7.5.103 + +- Some people don't care for icon constants +- Remove shim for browsers older than IE9 +- Remove google analytics settings + +## v7.5.102 + +- Remove google analytics + +## v7.5.101 + +- Fix Org.promote + +## v7.5.100 + +- Add Org.promote utility method +- Simplify determining whether to rate limit an API request by looking at request.auth +- Data migration to simplify org hierarchies + +## v7.5.99 + +- Rename security_settings.py > settings_security.py for consistency +- Drop Org.uses_topups, TopUp, and Debit +- Update to latest components +- Remove unused settings +- Remove TopUp, Debit and Org.uses_topups + +## v7.5.98 + +- Drop triggers, indexes and functions related to topups + +## v7.5.97 + +- Update mailroom_db command to use postgresql 13 +- Remove User.get_org() +- Always explicitly provide org when requesting a user API token +- Remove Msg.topup, TopUpCredits, and CreditAlert +- Test against latest redis 6.2, elastic 7.17.7 and postgres 13 + 14 + +## v7.5.96 + +- Remove topup credits squash task from celery beat + +## v7.5.95 + +- Update API auth classes to set request.org and use that to set X-Temba-Org header +- Use dropdown for brand field on org update form +- Remove topups + +## v7.5.94 + +- Add missing migration +- Remove support for orgs with brand as the host +- Remove brand tiers + +## v7.5.93 + +- Fix new event modal listeners +- Re-add org plan and plan end to update form +- Add png of rapidpro logo +- Update mailroom_db and test_db commands to set org brand as slug +- Add data migration to convert org.brand to be the brand slug + +## v7.5.92 + +- Create cla.yml +- Rework branding to not
require modifying what is in the settings + +## v7.5.91 + +- Remove outdated contributor files + +## v7.5.90 + +- Update flow editor +- Remove unused fields from ChannelType +- Allow non-beta users to add WeChat channels + +## v7.5.89 + +- Properly truncate the channel name when claiming a WAC channel +- Fix not saving selected date format to new child org +- Add redirect from org_create_child if org has a parent +- Remove unused Org.get_account_value +- Don't allow creation of child orgs within child orgs +- Remove low credit checking code + +## v7.5.88 + +- Remove the token refresh tasks for jiochat and wechat channels as courier does this on demand +- Remove Stripe and bundles functionality + +## v7.5.87 + +- Remove unused segment and intercom dependencies +- Remove unused utils code +- Update TableExporter to prepare values so individual tasks don't have to +- Update versions of mailroom etc that we use for testing +- Add configurable group membership columns to message, ticket and results exports (WIP) + +## v7.5.86 + +- Remove no longer used credit alert email templates +- Drop ChannelConnection + +## v7.5.85 + +- Remove unschedule option from scheduled broadcast read page +- Only show workspace children on settings menu +- Allow adding Android channel when its number is used on a WhatsApp channel +- Remove credit alert functionality +- Add scheduled message delete modal + +## v7.5.84 + +- No link fields on sub org page + +## v7.5.83 + +- Update telegram library which doesn't work with Python 3.10 +- Add user child workspace management +- Remove topup management views + +## v7.5.82 + +- Add JustCall channel type + +## v7.5.81 + +- Always show plan formax even for orgs on topups plan + +## v7.5.80 + +- Remove task to suspend topups orgs + +## v7.5.79 + +- Add new indexes for scheduled broadcasts view and API endpoint +- Update broadcast_on_change db trigger to check is_active +- Use database trigger to prevent status changes on flow sessions that go from exited to waiting + +## v7.5.78 + +- Remove old crisp templates +- Added Broadcast.is_active backfill migration + +## v7.5.77 + +- Proper redirect when removing channels +- Fix api header when logged out +- Take features out of branding and make it deployment level and remove api_link +- Get rid of flow_types as a branding setting + +## v7.5.76 + +- Tweak migration to convert missed call triggers to ignore archived triggers + +## v7.5.75 + +- Add Broadcast.is_active and set null=true and default=true +- Remove channel_status_processor context processor +- Add data migration to delete or convert missed call triggers + +## v7.5.74 + +- Fix webhook list page to not show every call as an error +- Small styling tweaks for api docs +- Remove fields from msgs event payloads that are no longer used + +## v7.5.73 + +- Update api docs to be nav agnostic +- Rewrite API Explorer to be vanilla javascript +- Use single permissions for all msg and contact list views +- Rework UI for incoming call triggers to allow selecting non-voice flows +- Remove send action from messages, add download results for flows +- Unload flow editor when navigating away + +## v7.5.72 + +- Always put service menu options at end of menu in new group + +## v7.5.71 + +- More appropriate login page, remove legacy textit code + +## v7.5.70 + +- Fix which fields should be on org update modal +- Honor brand config for signup + +## v7.5.69 + +- Fix race on editor load + +## v7.5.68 + +- Add failed reason for channel removed +- Remove no longer used channels option from
interrupt_sessions task + +## v7.5.67 + +- Interrupt channel by mailroom task + +## v7.5.66 + +- Remove need for jquery on spa in-page loads +- Remove key/secret hardcoding for boto session + +## v7.5.65 + +- Queue relayer messages with channel UUID and id +- No nouns for current object in menus except for New +- Add common contact field inclusion to exports +- Fix new scheduled message menu option +- Fix releasing other archive files to use proper pagination + +## v7.5.64 + +- Add an unlinked call list page +- Show channel log links on more pages to more users + +## v7.5.63 + +- Fix handling of relayer messages +- Add missing email templates for ticket exports + +## v7.5.62 + +- Add attachment_fetch as new channel log type + +## v7.5.61 + +- Fix claiming vonage channels for voice +- Better approach for page titles from the menu +- Fix layout for ticket menu in new ui + +## v7.5.60 + +- Fix the flow results export modal + +## v7.5.59 + +- Delete attachments from storage when deleting messages +- Add base export class for exports with contact data +- Actually make date range required for message exports (currently just required in UI) +- Add date range filtering to ticket and results exports +- Add ticket export (only in new UI for now) + +## v7.5.58 + +- Add twilio and vonage connection formax entries in new UI +- Update both main menu and content menus to align with new conventions +- Gate new UI by Beta group rather than staff +- Don't show new menu UIs until they're defined + +## v7.5.57 + +- Move status updates into update contact view +- Some tweaks to rendering of channel logs +- Cleanup use of ChannelConnection in preparation for dropping + +## v7.5.56 + +- Really really fix connection migration + +## v7.5.55 + +- Really fix connection migration + +## v7.5.54 + +- Fix migration to convert connections to calls + +## v7.5.53 + +- Add data migration to convert channel connections to calls + +## v7.5.52 + +- Replace last non-API usages of User.get_org() +- Use new call model in UI + +## v7.5.51 + +- Add new ivr.Call model to replace channels.ChannelConnection + +## v7.5.50 + +- Drop no-longer used ChannelLog fields +- Drop Msg.logs (replaced by .log_uuids) +- Drop ChannelConnection.connection_type + +## v7.5.49 + +- Fix test failing because python version changed +- Allow background flows for missed call triggers +- Different show url for spa and non-spa tickets +- Update editor to include fix for localizing categories for some splits +- Add data migration to delete existing missed call triggers for non-message flows +- Restrict Missed Call triggers to messaging flows + +## v7.5.48 + +- Stop recommending Android, always recommend Telegram +- Drop IVRCall proxy model and use ChannelConnection consistently +- Add migration to delete non-IVR channel connections +- Fix bug in user releasing and remove special superuser handling in favor of uniform treatment of staff users + +## v7.5.47 + +- Switch to temba-datepicker + +## v7.5.46 + +- Fix new UI messages menu + +## v7.5.45 + +- Replace some occurrences of User.get_org() +- Add new create modal for scheduled broadcasts + +## v7.5.44 + +- Add data migration to cleanup counts for SystemLabel=Calls +- Tweak ordering of Msg menu sections +- Add slack channel + +## v7.5.43 + +- Include config for mailroom test db channels +- Remove Calls from msgs section +- Update wording of Missed Call triggers to clarify they should only be used with Android channels +- Only show Missed Call trigger as option for workspaces with an Android channel +- Change
ChannelType.is_available_to and is_recommended_to to include org + +## v7.5.42 + +- Add data migration to delete legacy channel logs +- Drop support for channel logs in legacy format + +## v7.5.41 + +- Fix temba-store + +## v7.5.40 + +- Tweak forgot password success message + +## v7.5.39 + +- Add log_uuids field to ChannelConnection, ChannelEvent and Msg +- Improve `trim_http_logs_task` performance by splitting the query + +## v7.5.38 + +- Add codecov token to ci.yml +- Remove unnecessary maxdiff set in tests +- Fix to allow displaying logs that timed out +- Add HttpLog util and use to save channel logs in new format +- Add UUID to channel log and msgs + +## v7.5.37 + +- Show servicing org + +## v7.5.36 + +- Clean up chooser a smidge + +## v7.5.35 + +- Add org-chooser +- Refresh channel logs +- Add channel uuid to call log url +- Fix history state on tickets and contacts +- Update footer +- Add download icons for archives +- Fix create flow modal opener +- Flow editor embed styling +- Updating copyright dates and TextIt name (dba of Nyaruka) + +## v7.5.34 + +- Use elapsed_ms rather than request_time on channel log templates +- Update components (custom widths for temba-dialog, use anon_display where possible) +- Switch to temba-dialog based attachment viewer, remove previous libs +- Nicer collapsing on flow list columns +- Add overview charts for run results + +## v7.5.33 + +- ChannelLogCRUDL.List should use get_description so that it works if log_type is set +- Tweak channel log types to match what courier now creates +- Check for tabs after timeouts, don't auto-collapse flows +- Add charts to analytics tab + +## v7.5.32 + +- Update components with label fix + +## v7.5.31 + +- Add flow results in new UI + +## v7.5.30 + +- Remove steps for adding WAC credit line to businesses + +## v7.5.29 + +- Fix servicing of channel logs + +## v7.5.28 + +- Stop writing to unused media name field +- Add missing C Msg failed reason +- Add anon-display field to API contact results if org is anon and make urn display null + +## v7.5.27 + +- Revert change to Contact.bulk_urn_cache_initialize to have it set org on contacts + +## v7.5.26 + +- Don't set org on bulk initialized contacts + +## v7.5.25 + +- Fix filtering on channel log call page +- Add anon_display and use that when org is anon instead of using urn_display for anon id +- Add urn_display to contact reference on serialized runs in API + +## v7.5.24 + +- Fix missing service end button + +## v7.5.23 + +- Update to latest floweditor +- Add new ChannelLog log type choices and make description nullable +- Fix more content menus so that they can be fetched as JSON and add more tests + +## v7.5.22 + +- Remove unused policies.policy_read perm +- Replace all permission checking against Customer Support group with is_staff check on user + +## v7.5.21 + +- Allow views with ContentMenuMixin to be fetched as JSON menu items using a header +- Add new fields to channel log model and start reading from them if they're set + +## v7.5.20 + +- Update the links for line developers console on the line claim page +- Rework channel log details views into one generic one, one for messages, one for calls + +## v7.5.19 + +- Rework channel log rendering to use common HTTPLog template +- Fix titles on channel, classifier and manage logins pages + +## v7.5.18 + +- Workspace and user management in new UI + +## v7.5.17 + +- Show send history of scheduled broadcasts in correct order +- Only show option to delete runs to users who have that perm, and give editors that perm +- Update
deps + +## v7.5.16 + +- Fixed Zapier page title +- Validate channel name is not more than 64 characters +- Added 'authentication' to the temba anchor URL text + +## v7.5.15 + +- Fix URL for media uploads which was previously conflicting with media directory + +## v7.5.14 + +- Deprecate Media.name which can always be inferred from .path +- Improve cleaning of media filenames +- Convert legacy UUID fields on exports and labels +- Request instagram_basic permission for IG channels + +## v7.5.11 + +- Don't allow creating of labels with parents or editing labels to have a parent +- Rework the undocumented media API endpoint to be more specific to surveyor attachments +- Add MediaCRUDL with upload and list endpoints +- Remove requiring instagram_basic permission + +## v7.5.10 + +- Remove Media.is_ready, fix setting .status on alternates, add limit for upload size +- Rework ContentMenuMixin to put the menu in the context, and include new and legacy formats + +## v7.5.9 + +- Add status field to Media, move primary index to UUID field + +## v7.5.8 + +- Update floweditor +- Convert all views to use ContentMenuMixin instead of get_gear_links +- Add decorator to mock uuid generation in tests +- Process media uploads with ffmpeg in celery task + +## v7.5.7 + +- Add constraint to ensure non-waiting/active runs have exited_on set +- Add constraint to ensure non-waiting sessions have an ended_on + +## v7.5.6 + +- Remove unused upload_recording endpoint +- Add Media model + +## v7.5.5 + +- Remaining fallback modax references +- Add util for easier gear menu creation +- Add option to interrupt a contact from read page + +## v7.5.4 + +- Fix scripts on contact page start modal +- Add logging for IG channel claim failures +- Add features to BRANDING which determines whether brands have access to features +- Sort permissions a-z +- Fix related names on Flow.topics and Flow.users and add Topic.release +- Expose opened_by and opened_in over ticket API + +## v7.5.3 + +- Fix id for custom fields modal + +## v7.5.2 + +- Fix typo on archive button +- Only show active ticketers and topics on Open Ticket modal +- Add data migration to fix non-waiting sessions with no ended_on + +## v7.5.1 + +- Allow claiming WAC test numbers +- Move black setting into pyproject.toml +- Add Open Ticket modal view to contact read page + +## v7.5.0 + +- Improve user list page +- Add new fields to Ticket to record who or what flow opened a ticket +- Refresh menu on modax redirects, omit excess listeners from legacy lists +- Fix field label vs name in new UI +- Add start flow bulk action in new UI +- Show zeros in menu items in new UI +- Add workspace selection to account page in new UI +- Scroll main content pane up on page replacement in new UI + +## v7.4.2 + +- Update copyright notice +- Update stable versions + +## v7.4.1 + +- Update locale files + +## v7.4.0 + +- Remove superfluous Beta group perm +- Update new UI opt in permissions +- More tweaks to WhatsApp Cloud channel claiming + +## v7.3.79 + +- Add missing Facebook ID + +## v7.3.78 + +- Add button to allow admin to choose more FB WAC numbers + +## v7.3.77 + +- Add contact ticket list in new UI +- Fix permissions to connect WAC +- Register the WAC number in the activate method + +## v7.3.76 + +- Add the Facebook dialog login if the token is not submitted successfully on WAC org connect +- Fix campaigns archive and activate buttons +- Update to latest Django +- Only display WA templates that are active +- Update flow start dialog to use start preview endpoint +- Add start flow bulk
action for contacts + +## v7.3.75 + +- Redirect to channel page after WAC claim +- Fix org update pre form users roles list +- Adjust permission for org whatsapp connect view +- Ignore new conversation triggers without channels in imports + +## v7.3.74 + +- Use FB JS SDK for WAC signups + +## v7.3.73 + +- Add DB constraint to disallow active or waiting runs without a session + +## v7.3.72 + +- Add DB constraint to enforce that flow sessions always have output or output_url + +## v7.3.71 + +- Make sure all limits are updatable on the workspace update view +- Remove duplicated pagination +- Enforce channels limit per workspace + +## v7.3.70 + +- Fix workspace group limit check for existing group import +- Drop no longer used role m2ms + +## v7.3.69 + +- Fix campaign links + +## v7.3.68 + +- Add WhatsApp API version choice field +- Stop writing to the role specific m2m tables +- Add pending events tab to contact details + +## v7.3.67 + +- Merge pull request #3865 from nyaruka/plivo_claim +- formatting +- Sanitize plivo app names to match new rules + +## v7.3.66 + +- Merge pull request #3864 from nyaruka/fix-WA-templates +- Fix message templates syncing for new categories + +## v7.3.65 + +- Fix surveyor joins so new users are added to OrgMembership as well. + +## v7.3.64 + +- Fix fetching org users with given roles + +## v7.3.63 + +- Update mailroom_db command to correctly add users to orgs +- Stop reading from org role m2m tables + +## v7.3.62 + +- Fix rendering of dates on upcoming events list +- Data migration to backfill OrgMembership + +## v7.3.61 + +- Add missing migration + +## v7.3.60 + +- Data migration to fail active/waiting runs with no session +- Include scheduled triggers in upcoming contact events +- Add OrgMembership model + +## v7.3.59 + +- Spreadsheet layout for contact fields in new UI +- Adjust WAC channel claim to add system admin with user token + +## v7.3.58 + +- Clean up chat media treatment +- Add endpoint to get upcoming scheduled events for a contact +- Remove filtering by ticketer on tickets API endpoint and add indexes +- Add status to contacts API endpoint + +## v7.3.57 + +- Improve WAC phone number verification flow and feedback +- Adjust name of WAC channels to include the number +- Fix manage user update URL on org update page +- Support missing target_ids key in WAC responses + +## v7.3.56 + +- Fix deletion of users +- Cleanup user update form +- Fix missing users manage link page +- Add views to verify and register a WAC number + +## v7.3.55 + +- Update contact search summary encoding + +## v7.3.54 + +- Make channel type a property and use it to determine redact values in HTTP request logs + +## v7.3.53 + +- Make WAC channel visible to beta group + +## v7.3.52 + +- Fix field name for submitted token + +## v7.3.51 + +- Use default API throttle rates for unauthenticated users +- Bump pyjwt from 2.3.0 to 2.4.0 +- Cache user role on org +- Add WhatsApp Cloud channel type + +## v7.3.50 + +- Make Twitter channels beta only for now +- Use cached role permissions for permission checking and fix incorrect permissions on some API views +- Move remaining monkey patched methods on auth.User to orgs.User + +## v7.3.49 + +- Timings in export stats spreadsheet should be rounded to nearest second +- Include failed_reason/failed_reason_display on msg_created events +- Move more monkey patching on auth.User to orgs.User + +## v7.3.48 + +- Include first reply timings in ticket stats export +- Create a proxy model for User and start moving some of the monkey patching to proper
methods on that + +## v7.3.47 + +- Data migration to backfill ticket first reply timings + +## v7.3.46 + +- Add new squashable model to track average ticket reply times and close times +- Add Ticket.replied_on + +## v7.3.45 + +- Add endpoint to export Excel sheet of ticket daily counts for last 90 days + +## v7.3.44 + +- Remove omnibox support for fetching by label and message +- Remove functionality for creating new label folders and creating labels with folders + +## v7.3.43 + +- Fix generating cloned flow names so they can't end with trailing spaces +- Deleting of globals should be soft like other types +- Simplify checking of workspace limits in UI and API + +## v7.3.42 + +- Data migration to backfill ticket daily counts + +## v7.3.41 + +- Reorganization of temba.utils.models +- Update the approach to testing that a token is valid for FBA and IG channels +- Promote ContactField and Global to be TembaModels whilst for now retaining their custom name validation logic +- Add import support methods to TembaModel and use with Topic + +## v7.3.40 + +- Add workspace plan, disallow grandchild org creation. +- Add support for shared usage tracking + +## v7.3.39 + +- Move temba.utils.models to its own package +- Queue broadcasts to mailroom with their created_by +- Add teams to mailroom test database +- Add is_system to TembaModel, downgrade Contact to SmartModel + +## v7.3.38 + +- Make sure we request a FB long lived page token using a long lived user token +- Convert campaign and campaignevent to use real UUIDs, simplify use of constants in API + +## v7.3.37 + +- Don't forget to squash TicketDailyCount +- Fix imports of flows with ticket topic dependencies + +## v7.3.36 + +- Add migration to update names of deleted labels and add constraint to enforce uniqueness +- Move org limit checking from serializers to API views +- Generalize preventing deletion of system objects via the API and allow deleting of groups that are used in flows +- Serialized topics in the API should include system field +- Add name uniqueness constraints to Team and Topic +- Add Team and TicketDailyCount models + +## v7.3.35 + +- Tweaks to Topic model to enforce name uniqueness +- Add `__str__` and `__repr__` to TembaModel to replace custom methods and remove several unused ones +- Convert FlowLabel to be a TembaModel + +## v7.3.34 + +- Fix copying flows to generate a unique name +- Rework TembaModel to be a base model class with UUID and name + +## v7.3.33 + +- Use model mixin for common name functionality across models + +## v7.3.32 + +- Add DB constraint to enforce flow name uniqueness + +## v7.3.31 + +- Update components with resolved locked file + +## v7.3.29 + +- Fix for flatpickr issue breaking date picker +- ContactField.get_or_create should enforce name uniqueness and ignore invalid names +- Add validation error when changing type of field used by campaign events + +## v7.3.28 + +- Tweak flow name uniqueness migration to honor max flow name length + +## v7.3.27 + +- Tweak header to be uniform treatment regardless of menu +- Data migration to make flow names unique +- Add flow.preview_start endpoint which calls mailroom endpoint + +## v7.3.26 + +- Fix mailroom_db command to set languages on new orgs +- Fix inline menus when they have no children +- Fix message exports + +## v7.3.25 + +- Fix modals on spa pages +- Add service button to org edit page +- Update to latest django +- Add flow name to message Export if we have it + +## v7.3.24 + +- Allow creating channel with same address when schemes do not overlap + +## v7.3.23
+ +- Add status to list of reserved field keys +- Migration to drop ContactField.label and field_type + +## v7.3.22 + +- Update contact modified_on when deleting a group they belong to +- Add custom name validator and use for groups and flows + +## v7.3.21 + +- Fix rendering of field names on contact read page +- Stop writing ContactField.label and field_type + +## v7.3.20 + +- Stop reading ContactField.label and field_type + +## v7.3.19 + +- Correctly set new ContactField fields in mailroom_db test_db commands +- Update version of codecov action as well as versions of rp-indexer and mailroom used by tests +- Data migration to populate name and is_system on ContactField + +## v7.3.18 + +- Give contact fields a name and is_system db field +- Update list of reserved keys for contact fields + +## v7.3.17 + +- Fix uploading attachments to properly get uploaded URL + +## v7.3.16 + +- Fix generating of unique flow, group and campaign names to respect case-insensitivity and max name length +- Add data migration to prefix names of previously deleted flows +- Prefix flow names with a UUID when deleted so they don't conflict with other flow names +- Remove warning about feature on flow start modal being removed + +## v7.3.15 + +- Check name uniqueness on flow creation and updating +- Cleanup existing field validation on flow and group forms +- Do not fail to release a channel when we cannot reach the Facebook API for FB channels + +## v7.3.14 + +- Convert flows to be a soft dependency + +## v7.3.13 + +- Replace default index on FlowRun.contact with one that includes flow_id + +## v7.3.12 + +- Data migration to give every workspace an Open Tickets smart system group + +## v7.3.11 + +- Fix bulk adding/removing to groups from contact list pages +- Convert groups into a soft dependency for flows +- Use dataclasses instead of NamedTuples where appropriate + +## v7.3.10 + +- Remove path from example result in runs API endpoint docs +- Prevent updating or deleting of system groups via the API or UI +- Add system property to groups endpoint and fix docs + +## v7.3.9 + +- Remove IG channel beta gating + +## v7.3.8 + +- Fix fetching of groups from API when using separate readonly DB connection + +## v7.3.7 + +- Rework how we fetch contact groups + +## v7.3.6 + +- For FB / IG claim pages use expiring token if no long lived token is provided + +## v7.3.5 + +- Data migration to update group_type=U to M|Q + +## v7.3.4 + +- Merge pull request #3734 from nyaruka/FB-IG-claim + +## v7.3.3 + +- Check all org groups when creating unique group names +- Make ContactGroup.is_system non-null and switch to using it to distinguish between system and user groups + +## v7.3.2 + +- Data migration to populate ContactGroup.is_system + +## v7.3.1 + +- Add is_system field to ContactGroup and rename 'dynamic' to 'smart' +- Return 404 from edit_sub_org if org doesn't exist +- Use live JS SDK for FBA and IG refresh token views +- Add scheme to flow results exports + +## v7.3.0 + +- Add countries supported by Africastalking +- Replace empty squashed migrations with real ones + +## v7.2.4 + +- Update stable versions in README + +## v7.2.3 + +- Add empty versions of squashed migrations to be implemented in 7.3 + +## v7.2.2 + +- Updated translations from Transifex +- Fix searching on calls list page + +## v7.2.1 + +- Update locale files + +## v7.2.0 + +- Disallow PO export/import for archived flows because mailroom doesn't know about them +- Add campaigns section to new UI + +## v7.1.82 + +- Update to latest flake8, black and isort + +##
v7.1.81 + +- Remove unused collect_metrics_task +- Bump dependencies + +## v7.1.80 + +- Remove progress bar on facebook claim +- Replace old indexes based on flows_flowrun.is_active + +## v7.1.79 + +- Remove progress dots for FBA and IG channel claim pages +- Actually drop exit_type, is_active and delete_reason on FlowRun +- Fix group name validation to include system groups + +## v7.1.78 + +- Test with latest indexer and mailroom +- Stop using FlowRun.exit_type, is_active and delete_reason + +## v7.1.77 + +- Tweak migration as Postgres won't let us drop a function being used + +## v7.1.76 + +- Update vonage deprecated methods + +## v7.1.75 + +- Rework flowrun db triggers to use status rather than exit_type or is_active + +## v7.1.74 + +- Allow archiving of flow messages +- Don't try interrupting session that is about to be deleted +- Tweak criteria for who can preview new interface + +## v7.1.73 + +- Data migration to fix facebook contact names + +## v7.1.72 + +- Revert database trigger changes which stopped deleting path and exit_type counts on flowrun deletion + +## v7.1.71 + +- Fix race condition in contact deletion +- Rework flowrun database triggers to look at delete_from_results instead of delete_reason + +## v7.1.69 + +- Update to latest floweditor + +## v7.1.68 + +- Add FlowRun.delete_from_results to replace delete_reason + +## v7.1.67 + +- Drop no longer used Msg.delete_reason and delete_from_counts columns +- Update to Facebook Graph API v12 + +## v7.1.66 + +- Fix last reference to Msg.delete_reason in db triggers and stop writing that on deletion + +## v7.1.65 + +- Rework msgs database triggers so we don't track counts for messages in archives + +## v7.1.64 + +- API rate limits should be org scoped except for staff accounts +- Expose current flow on contact read page for all users +- Add deprecation text for restart_participants + +## v7.1.63 + +- Fix documentation of contacts API endpoint +- Release URN channel events in data migration to fix deleted contacts with tickets +- Use original filename inside UUID folder to upload media files + +## v7.1.62 + +- Tweak migration to only fully delete inactive contacts with tickets + +## v7.1.61 + +- Add flow field to contacts API endpoint +- Add support to the audit_es command for dumping ES queries +- Add migration to make sure contacts which we failed to delete are really deleted +- Fix contact release with tickets having a broadcast + +## v7.1.60 + +- Adjust WA message template warning to not be shown for Twilio WhatsApp channels +- Add support to increase API rates per org + +## v7.1.59 + +- Add migration to populate Contact.current_flow + +## v7.1.58 + +- Restrict msg visibility changes on bulk actions endpoint + +## v7.1.57 + +- Add sentry id for 500 page +- Display current flow on contact read page for beta users +- Add new msg visibility for msgs deleted by senders and allow deleted msgs to appear redacted in contact histories +- Contact imports should strip rows that are empty or missing a UUID or URNs + +## v7.1.56 + +- Fix issue with sending to step_node +- Add missing languages for whatsapp templates +- Add migration to remove inactive contacts from user groups + +## v7.1.55 + +- Fix horizontal scrolling in editor +- Add support to undo_footgun command to revert status changes + +## v7.1.53 + +- Relayer syncing should ignore bad URNs that fail validation in mailroom +- Add unique constraint to ContactGroup to enforce name uniqueness within an org + +## v7.1.52 + +- Fix scrolling select + +## v7.1.51 + +- Merge pull request #3671 from
nyaruka/ui-widget-fixes +- Fix select for slow clicks and removing rules in the editor + +## v7.1.50 + +- Add migration to make contact group names unique within an organization +- Add cookie based path to opt in and out of new interface + +## v7.1.49 + +- Update to Django 4 + +## v7.1.48 + +- Make IG channel beta gated +- Remove expires_on, parent_uuid and connection_id fields from FlowRun +- Add background flow options to campaign event dialog + +## v7.1.47 + +- Make FlowSession.wait_resume_on_expire not-null + +## v7.1.46 + +- Add migration to set wait_resume_on_expire on flow sessions +- Update task used to update run expirations to also update them on the session + +## v7.1.45 + +- Make FlowSession.status non-null and add constraint to ensure waiting sessions have wait_started_on and wait_expires_on set + +## v7.1.44 + +- Fix login via password managers +- Change Gujarati language code to 'guj' +- Add instagram channel type +- Add interstitial when inactive contact search meets threshold + +## v7.1.42 + +- Add missing migration + +## v7.1.41 + +- Add Contact.current_flow + +## v7.1.40 + +- Drop FlowRun.events and FlowPathRecentRun + +## v7.1.39 + +- Include qrious.js script +- Add FlowSession.wait_resume_on_expire +- Add Msg.flow + +## v7.1.38 + +- Replace uses of deprecated Django functions +- Remove crisp and librato analytics backends and add ConsoleBackend as example +- Data migration to populate FlowSession.wait_started_on and wait_expires_on + +## v7.1.37 + +- Migration to remove recent run creation from db triggers +- Remove no longer used recent messages view and functionality on FlowPathRecentRun + +## v7.1.36 + +- Add scheme column on contact exports for anon orgs +- Remove option to include router arguments in downloaded PO files +- Make loading of analytics backends dynamic based on setting of backend class paths + +## v7.1.35 + +- Only display crisp support widget if brand supports it +- Do crisp chat widget embedding via analytics template hook + +## v7.1.34 + +- Update to editor v1.16.1 + +## v7.1.33 + +- Add management command to fix broken flows +- Use new recent contacts endpoint for editor + +## v7.1.32 + +- Temporarily put crisp_website_id back in context + +## v7.1.31 + +- Remove include_msgs option of flow result exports + +## v7.1.30 + +- Update to latest flow editor + +## v7.1.29 + +- Update to latest floweditor +- Add FlowSession.wait_expires_on +- Improve validation of flow expires values +- Remove segment and intercom integrations and rework librato and crisp into a pluggable analytics framework + +## v7.1.28 + +- Convert FlowRun.id and FlowSession.id to BIGINT + +## v7.1.27 + +- Drop no longer used FlowRun.parent + +## v7.1.26 + +- Prefer UTF-8 if we're not sure about encoding of CSV import + +## v7.1.25 + +- Fix Kaleyra claim blurb +- Fix HTTPLog read page showing warning shading for healthy calls + +## v7.1.24 + +- Fix crisp identify on signup +- Use same event structure for Crisp as others + +## v7.1.23 + +- Update help links for the editor +- Add failed reason for failed destination such as missing channel or URNs +- Add view to fetch recent contacts from Redis + +## v7.1.22 + +- Fix join syntax + +## v7.1.21 + +- Fix join syntax, argh + +## v7.1.20 + +- Arrays not allowed on track events + +## v7.1.19 + +- Add missing env to settings_common + +## v7.1.18 + +- Implement crisp as an analytics integration + +## v7.1.17 + +- Tweak event tracking for results exports +- Revert change to hide non-responded runs in UI + +## v7.1.16 + +- Drop Msg.response_to +- Drop
## v7.1.15

- Remove path field from API runs endpoint docs
- Hide options to include non-responded runs on results download modal and results page
- Fix welcome page widths
- Update mailroom_db to require pg_dump version 12.\*
- Update temba-components
- Add workspace page to new UI

## v7.1.14

- Fix wrap for recipients list on flow start log
- Set Msg.delete_from_counts when releasing a msg
- Msg.fail_old_messages should set failed_reason
- Add new fields to Msg: delete_from_counts, failed_reason, response_to_external_id
- Tweak msg_dewire command to only fetch messages which have never errored

## v7.1.13

- Add management command to dewire messages based on a file of ids
- Render webhook calls which are too slow as errors

## v7.1.12

- Remove last of msg sending code
- Fix link to webhook log

## v7.1.11

- Remove unnecessary conditional load of jquery

## v7.1.10

- Make forgot password email look a little nicer and be easier to localize

## v7.1.9

- Fix email template for password forgets

## v7.1.8

- Remove chatbase as an integration as it no longer exists
- Clear keyword triggers when switching to flow type that doesn't support them
- Use branded emails for export notifications

## v7.1.5

- Remove warning on flow start modal about settings changes
- Add privacy policy link
- Test with Redis 3.2.4
- Updates for label sub menu and internal menu navigation

## v7.1.4

- Remove task to retry errored messages which are now handled in mailroom

## v7.1.2

- Update poetry dependencies
- Update to latest editor

## v7.1.1

- Remove channel alert notifications as these will become incidents
- Add Incident model as well as OrgFlagged and WebhooksUnhealthy types

## v7.1.0

- Drop no longer used index on msg UUID
- Re-run collect_sql
- Use std collection types for typing hints and drop use of object in classes

## v7.0.4

- Fix contact stop list page
- Update to latest black to fix errors on Python 3.9.8
- Add missing migration

## v7.0.3

- Update to latest editor v1.15.1
- Update locale files which adds cs and mn

## v7.0.2

- Update editor to v1.15 with validation fixes
- Fix outbox pagination
- Add generic title bar with new dropdown on spa

## v7.0.1

- Add missing JS function to delete messages in the archived folder
- Update locale files

## v7.0.0

- Fix test failing due to bad domain lookup

## v6.5.71

- Add migration to remove deleted contacts and groups from scheduled broadcasts
- Releasing a contact or group should also remove it from scheduled broadcasts (see the sketch below)

## v6.5.70

- Fix intermittent credit test failure
- Tidy up Msg and Broadcast constants
- Simplify settings for org limit defaults
- Fix rendering of deleted contacts and groups in recipient lists

## v6.5.69

- Remove extra labels on contact fields

## v6.5.68

- Re-enable chat monitoring

## v6.5.67

- Keep ticket views and components in sync

## v6.5.66

- Add channel menu
- Add test for dynamic contact group list, remove editor_next redirect
- Fix styling on contact list headers and flow embedding
- Add messages to menu, refresh override
- Switch contact fields and import to use template inheritance
- Use template inheritance for spa work
- Add deeplinking support for non-menued destinations

## v6.5.65

- Move to Python 3.9

## v6.5.64

- Fix export notification email links
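A minimal sketch of the release-time cleanup in v6.5.71 above; the related name on the broadcast M2M is an assumption, and the real release logic does much more.

```python
def remove_from_scheduled_broadcasts(contact):
    """On contact release, pull the contact out of any scheduled broadcasts (v6.5.71)."""
    # only broadcasts with a schedule still have editable recipient lists;
    # contact.broadcasts is an assumed related name for Broadcast.contacts
    for broadcast in contact.broadcasts.filter(schedule__isnull=False):
        broadcast.contacts.remove(contact)
```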
## v6.5.63

- When a contact is released their tickets should be deleted
- Test on PG 12 and 13
- Use S3 Select for message exports
- Use new notifications system for export emails

## v6.5.62

- Use crontab for WA tokens task schedule
- Allow keyword triggers to be single emojis
- Celery 5.x

## v6.5.60

- Add option to audit_archives to check flow run counts
- Drop no longer used ticket subject column
- Add contact read page based on contact chat component

## v6.5.59

- Fewer progress updates in audit_archives
- Tweak tickets API endpoint to accept a uuid URL param

## v6.5.58

- Add progress feedback to audit_archives
- Update locale files

## v6.5.57

- Fix Archive.rewrite

## v6.5.56

- Encode content hashes sent to S3 using Base64

## v6.5.55

- Trim mailgun ticketer names to <= 64 chars when creating
- Management command to audit archives
- Use field limiting on omnibox searches

## v6.5.54

- Fix S3 select query generation for date fields

## v6.5.53

- Disable all sentry transactions
- Use S3 select for flow result exports
- Add utils for compiling S3 select queries

## v6.5.52

- Merge pull request #3555 from nyaruka/ticket-att
- Update test to include attachment list for last_msg
- Merge pull request #3553 from nyaruka/httplog_tweaks
- Merge pull request #3554 from nyaruka/s3_retries
- Add other missing migration
- Add retry config to S3 client
- Add missing migration to drop WebhookResult model
- Merge pull request #3552 from nyaruka/fix-WA-check-health-logs
- Fix tests
- Add zero defaults to HTTPLog fields, drop WebHookResult and tweak HTTPLog templates for consistency
- Fix response for WA message template to be HTTP response
- Merge pull request #3549 from nyaruka/retention_periods
- Merge pull request #3546 from nyaruka/readonly_exports
- Merge pull request #3548 from nyaruka/fix-WA-check-health-logs
- Merge pull request #3550 from nyaruka/truncate-org
- Use single retention period setting for all channel logs
- Truncate org name with ellipsis on org chooser
- Add new setting for retention periods for different types and make trimming tasks more consistent
- Use readonly database connection for contact, message and results exports
- Add migration file
- Log update WA status error using HTTPLog

## v6.5.51

- Add retry config to S3 client
- Add zero defaults to HTTPLog fields, drop WebHookResult and tweak HTTPLog templates for consistency

## v6.5.50

- Fix response for WA message template to be HTTP response

## v6.5.49

- Truncate org name with ellipsis on org chooser
- Add new setting for retention periods for different types and make trimming tasks more consistent
- Use readonly database connection for contact, message and results exports
- Log update WA status error using HTTPLog

## v6.5.48

- Fix clear contact field event on ticket history

## v6.5.47

- Use readonly database connection for contacts API endpoint
- Use webhook_called events from sessions for contact history
- Remove unused webhook result views and improve httplog read view
- Fix API endpoints not always using readonly database connection and add testing

## v6.5.46

- Move list refresh registration out of content block

## v6.5.45

- Temporarily disable refresh
- Don't use readonly database connection for GETs to contacts endpoint
- Add view for webhook calls saved as HTTP logs
- Pass location support flag to editor as a feature flag

## v6.5.44

- GET requests to API should use readonly database on the view's queryset (see the sketch below)
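v6.5.49 through v6.5.44 above move heavy reads onto a separate connection. A minimal sketch, assuming a "readonly" alias is configured in settings.DATABASES; the helper name is illustrative.

```python
from django.conf import settings

def use_readonly(queryset):
    """Route a queryset to the read-replica connection when one is configured."""
    if "readonly" in settings.DATABASES:
        return queryset.using("readonly")
    return queryset  # fall back to the default connection

# e.g. in an API view's GET path:
#   queryset = use_readonly(Contact.objects.filter(org=org))
```

Keeping the fallback means test and development environments without a replica behave exactly as before.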
## v6.5.43

- Tweak how HTTP logs are deleted
- Add num_retries field to HTTPLog

## v6.5.42

- Pin openpyxl to 3.0.7 until 3.0.8 release problems resolved
- Add new fields to HTTPLog to support saving webhook results
- Make TPS for Shaqodoon be 5 by default
- Make location support optional via new branding setting

## v6.5.41

- Update editor with fix for field creation
- Minor tidying of HTTPLog
- Fix rendering of tickets on contact read page which now don't have subjects

## v6.5.40

- Update to floweditor 1.14.2
- Tweak database settings to add new readonly connection and remove no longer used direct connection
- Update menu on ticket list update

## v6.5.38

- Deprecate subjects on tickets in favor of topics
- Tweak ticket bulk action endpoint to allow unassigning
- Add API endpoint to read and write ticket topics

## v6.5.37

- Add tracking of unseen notification counts for users
- Clear ticket notifications when visiting appropriate ticket views
- Remove no longer used Log model

## v6.5.36

- Revert cryptography update

## v6.5.35

- Update to newer pycountry and bump other minor versions
- Fix ticketer HTTP logs not being accessible
- Add management command to re-eval a smart group
- Add comment to event_fires about mailroom issue
- Fix indexes on tickets to match new UI
- Now that mailroom is setting ContactImport.status, use in reads

## v6.5.34

- Update to latest components (fixes overzealous list refresh, non-breaking ticket summary, and display name when created_by is null)

## v6.5.33

- Fix Add To Group bulk action on contact list page
- Add status field to ContactImport and before starting batches, set redis key mailroom can use to track progress
- Delete unused template and minor cleanup

## v6.5.32

- Fix template indentation
- Pass force=True when closing ticket as part of releasing a ticketer
- Add beginnings of new nav and SPA based UI (hidden from users for now)

## v6.5.31

- Show masked urns for contacts API on anon orgs
- Rework notifications, don't use Log model

## v6.5.30

- Fix deleting of imports and exports now that they have associated logs

## v6.5.29

- Add basic (and unused for now) JSON endpoint for listing notifications
- Reduce sentry trace sampling to 0.01
- Override kir language name
- Add change_topic as action to ticket bulk actions API endpoint
- Add Log and Notification model

## v6.5.28

- Add new ticket event type for topic changes
- Migrations to assign default topic to all existing tickets

## v6.5.27

- Add migration to give all existing orgs a default ticket topic

## v6.5.26

- Move mailroom_db data to external JSON file
- Run CI tests with latest mailroom
- Add ticket topic model and initialize orgs with a default topic

## v6.5.25

- Improve display of channel logs for calls

## v6.5.24

- Add machine detection as config option to channels with call role
- Tweak event_fires management command to show timesince for events in the past

## v6.5.23

- Drop retry_count, make error_count non-null
- Improve channel log templates so that we use consistent date formatting, show call error reasons, and show back button for calls
- Tweak how we assert form errors and fix where they don't match exactly
- Re-add QUEUED status for channel connections

## v6.5.22

- Tweak index used for retrying IVR calls to only include statuses Q and E
- Don't show ticket events like note added or 
assignment on contact read page +- Include error reason in call_started events in contact history +- Remove channel connection statuses that we don't use and add error_reason + +## v6.5.21 + +- Prevent saving of campaign events without start_mode +- Improve handling of group lookups in contact list views +- Add button to see channel error logs + +## v6.5.20 + +- Make ChannelConnection.error_count nullable so it can be removed +- Cleanup ChannelConnection and add index for IVR retries +- Fix error display on contact update modal +- Update to zapier app directory, wide formax option and fixes +- Enable filtering on the channel log to see only errors + +## v6.5.19 + +- Fix system group labels on contact read page +- Use shared error messages for orgs being flagged or suspended +- Update to latest smartmin (ignores \_format=json on views that don't support it) +- Add command to undo events from a flow start +- Send modal should validate URNs +- Use s3 when appropriate to get session output +- Add basic user accounts API endpoint + +## v6.5.18 + +- Apply webhook ticket fix to successful webhook calls too + +## v6.5.17 + +- Tweak error message on flow start modal now field component is fixed +- Fix issue for ticket window growing with url length +- Update LUIS classifiers to work with latest API requirements +- Tweak migration to populate contact.ticket_count so that it can be run manually +- Switch from django.contrib.postgres.fields.JSONField to django.db.models.JSONField +- Introduce s3 utility functions, use for reading s3 sessions in contact history + +## v6.5.16 + +- Update to Django 3.2 +- Migration to populate contact.ticket_count + +## v6.5.15 + +- Add warning to flow start modal that options have changed +- Fix importing of dynamic groups when field doesn't exist + +## v6.5.14 + +- Update to latest cryptography 3.x +- Add deep linking for tickets +- Update db trigger on ticket table to maintain contact.ticket_count + +## v6.5.13 + +- Tweak previous data migration to work with migrate_manual + +## v6.5.12 + +- Migration to zeroize contact.ticket_count and make it non-null + +## v6.5.11 + +- Allow deletion of fields used by campaign events +- Add last_activity_on to ticket folder endpoints +- Add API endpoint for ticket bulk actions +- Add nullable Contact.ticket_count field + +## v6.5.10 + +- Remove textit-whatsapp channel type +- Show ticket counts on ticketing UI +- Update to latest components with fixes for scrollbar and modax reuse +- Use new generic dependency delete modal for contact fields + +## v6.5.9 + +- Add management command for listing scheduled event fires +- Add index for ticket count squashing task +- Add data migration to populate ticket counts +- Add constraint to Msg to disallow sent messages without sent_on and migration to fix existing messages like that + +## v6.5.8 + +- Fix celery task name + +## v6.5.7 + +- Fix flow start modal when starting flows is blocked +- Add more information to audit_es_group command +- Re-save Flow.has_issues on final flow inspection at end of import process +- Add squashable model for ticket counts +- Add usages modal for labels as well +- Update the WA API version for channel that had it set when added +- Break out ticket folders from status, add url state + +## v6.5.6 + +- Set sent_on if not already set when handling a mt_dlvd relayer cmd +- Display sent_on time rather than created_on time in Sent view +- Only sample 10% of requests to sentry +- Fix searching for scheduled broadcasts +- Update Dialog360 API usage + +## v6.5.5 + +- Fix 
export page to use new filter to get non-localized class name for ids
- Fix contact field update
- Add searchable to trigger groups
- Add option to not retry IVR calls
- Add usages modal for groups
- Tweak wording on flow start modal

## v6.5.4

- Rework flow start modal to show options as exclusions which are unchecked by default
- Change sent messages view to be ordered by -sent_on

## v6.5.3

- Add Last Seen On as column to contact exports
- Reusable template for dependency lists

## v6.5.2

- Internal ticketer for all orgs

## v6.5.1

- Cleanup Msg CRUDL tests
- Cleanup squashable models
- Apply translations in fr
- Replace trigger folders with type specific filtered list pages so that they can be sortable within types

## v6.4.7

- Update flow editor to include lone-ticketer submit fix
- Fix pagination on the webhook results page

## v6.4.6

- Update flow editor to fix not being able to play audio attachments in simulator

## v6.4.4

- Start background flows with include_active = true
- Update flow editor with MediaPlayer fix
- Fix poetry content-hash to remove install warning
- Update translations from transifex

## v6.4.3

- Improve contact field forms
- Fix urn sorting on contact update
- Improve wording on forms for contact groups, message labels and flow labels
- Improve wording on campaign form

## v6.4.2

- Fix attachment button when attachments don't have extensions
- Add missing ticket events to contact history
- Fix clicking attachments in msgs view sometimes navigating to contact page
- Parameterized form widgets. Bigger, darker form bits.
- Tweak trigger forms for clarity
- Add command to rebuild messages and pull translations from transifex

## v6.4.1

- Fix unassigning tickets

## v6.4.0

- Update README

## v6.3.90

- Fix alias editor to post json

## v6.3.89

- Remove beta gating of internal ticketers
- Control which users can have tickets assigned to them with a permission
- Use mailroom endpoints for ticket assignment and notes
- Add custom user recover password view

## v6.3.88

- Fix to display email on manage orgs
- Drop no longer used Broadcast.is_active field

## v6.3.87

- Update indexes on ticket model
- Tweak ticketer default names
- Add empty ticket list treatment
- Fix API docs for messages endpoint to mention attachments rather than the deprecated media field
- Update to editor with hidden internal ticketers
- Consistent setting of modified_by when releasing/archiving/restoring
- Remove old ticket views
- Change ticketer sections on org home page to have Remove button and not link to old ticket views
- Add assignee to ticketing endpoints, some new filters and new assignment view

## v6.3.86

- Stop writing Broadcast.is_active as default value
- Fix keyword triggers being imported without a valid match_type

## v6.3.85

- Use the current user as the manual trigger user during simulation
- Better trigger exports and imports
- Make broadcast.is_active nullable and stop filtering by it in the API

## v6.3.84

- Ignore scheduled triggers in imports because they don't import properly
- Fix redirect after choosing an org for users that can't access the inbox
- Optionally filter ticket events by ticket in contact history view

## v6.3.83

- Fix default content type for pjax requests
- Tweak queuing of flow starts to include created_by_id

## v6.3.82

- Revert recent formax changes
(undocumented for now) on broadcast write API endpoint
- Refactor scheduling to use shared form
- Add exclusion groups to scheduled triggers

## v6.3.80

- Update components so omnibox behaves like a field
- Drop Language model and Org.primary_language field

## v6.3.79

- Order tickets by last_activity_on and update indexes to reflect that
- Backfill ticketevent.contact and use that for fetching events in contact history
- Fix creating scheduled triggers not being able to see week day options
- Handle reopen events for tickets
- Stop creating Language instances or setting Org.primary_language

## v6.3.78

- Add Ticket.last_activity_on and TicketEvent.contact
- Return tickets by modified_on in the API
- Add ability to reverse results for runs/contacts API endpoints

## v6.3.77

- Better validation of invalid tokens when claiming Zenvia channels
- Fix languages formax to not allow empty primary language

## v6.3.76

- Read org languages from org.flow_languages instead of Language instances

## v6.3.75

- Fix closing and reopening of tickets from API

## v6.3.74

- Add better labels and help text for groups on trigger forms
- Load ticket events from database for contact histories
- Fix rendering of closed ticket triggers on trigger list page
- Fix rendering of ticket events as JSON
- Fix for delete modals

## v6.3.73

- Backfill ticket open and close events
- Add support for closed ticket triggers

## v6.3.72

- Add CSRF tokens to modaxes

## v6.3.70

- Add CSRF token to modax form
- Tweak padding for nav so we don't overlap alerts
- Only require current password to change email or password
- Fix icon colors on latest chrome
- Migration to backfill Org.flow_languages

## v6.3.69

- Add Org.flow_languages and start populating in Org.set_languages
- Raise the logo so it can be clicked

## v6.3.68

- Enable exclusion groups on triggers and make groups an option for all trigger types
- Add users to mailroom test db
- Add ticket note support to UI

## v6.3.67

- Pass user id to ticket/close ticket/reopen endpoints to use in the TicketEvent mailroom creates
- Model changes for ticket assignment
- Make flow session output URL have a max length of 2048

## v6.3.66

- Add new ticket event model
- Add output_url field to FlowSession

## v6.3.65

- Fix rendering of recipient buttons on outbox
- Rework trigger create forms to make conflict handling more consistent
- Iterate through all pages when syncing whatsapp templates

## v6.3.64

- URL field on HTTPRequestLog should have max length of 2048

## v6.3.63

- Drop unused index on contact name, and add new org+modified_on index (see the sketch below)

## v6.3.62

- Update components to single mailroom resource for completion

## v6.3.60

- Only retry 5000 messages at a time, prefetch channel and fields

## v6.3.59

- Enable model instances to show an icon in selects

## v6.3.58

- Add model changes for closed ticket triggers
- Add model changes for exclude groups support on triggers

## v6.3.57

- Tweak mailroom_db to make contact created_on values fixed
- Add trigger type folder list views
- Fix filtering of flows for new conversation triggers
- Fix ordering of channel fields on triggers
- Tweak inspect_flows command to handle unreadable flows
- Nest group buttons on campaign list so they don't grow to largest cell
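The index change in v6.3.63 above pairs org with modified_on, which matches how contact lists are filtered and ordered. A minimal sketch with simplified fields; the index name is illustrative.

```python
from django.db import models

class Contact(models.Model):
    org = models.ForeignKey("orgs.Org", on_delete=models.PROTECT)  # app label assumed
    name = models.CharField(max_length=128, null=True)
    modified_on = models.DateTimeField()

    class Meta:
        indexes = [
            # supports "contacts in this org, most recently modified first"
            models.Index(fields=["org", "-modified_on"], name="contacts_org_modified"),
        ]
```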
## v6.3.56

- Fix migrating flows whose definitions contain decimal values
- Update to tailwind 2, fix security warnings
- Simplify org filtering on CRUDLs
- Remove IS_PROD setting

## v6.3.55

- Update layout and color for badge buttons
- Add management command to inspect flows and fix has_issues where needed
- Fix deleting flow labels with parents
- Fix broken org delete modal
- Add user arg to Org.release and User.release

## v6.3.54

- Optimize message retries with a perfect index
- Convert channels to soft dependencies

## v6.3.53

- Update to latest temba-components

## v6.3.52

- Update to latest floweditor
- Adjust WA templates page title
- Fix Dialog360 WA templates sync

## v6.3.51

- Adjust WA templates page styles
- Migration to clear next_attempt for android channels

## v6.3.50

- Resend messages using web endpoint rather than task
- Convert message labels, globals and classifiers to use soft dependencies

## v6.3.49

- Make Msg.next_attempt nullable and add msgs to mailroom_db
- Migration to ensure that inactive flows don't have any deps
- Fix Flow.release to remove template deps

## v6.3.48

- Calculate proper msg ids for commands from relayers that have an integer overflow issue
- Add reusable view for dependency deleting modals and switch to that and soft dependencies for ticketers
- Don't do mailroom session interruption during org deletion
- Fix org deletion when broadcasts have parents and webhook results have contacts
- Make sure templates and template translations are deleted on org release
- Set max fba pages limit to 200

## v6.3.47

- Display warning icon in flow list for flows with issues
- Make Flow.has_issues non-null and cleanup unused localized strings on Flow model
- Support syncing Dialog360 WhatsApp templates

## v6.3.46

- Fix channel log icons and disallow message resending for suspended orgs
- Add migration to populate Flow.has_issues

## v6.3.45

- Add migration to populate template namespace
- Expose template translation namespace field on API
- Don't save issues into flow metadata but just set new field has_issues instead
- Queue mailroom task to do msg resends

## v6.3.44

- Tweak import preview page so when adding to a group isn't enabled, the group controls are disabled
- Update flow editor and temba-components

## v6.3.40

- Add namespace field to template translations
- Fetching and saving revisions should return flow issues as separate field

## v6.3.39

- Rework task for org deletion

## v6.3.38

- Move tickets endpoint to tickets crudl
- Refactor WhatsApp templates
- Add task for releasing of orgs

## v6.3.37

- Fix contact imports always creating new groups
- Migration to fix escaped nulls in flow revision definitions
- Rework beta gated agent views to be ticket centric

## v6.3.35

- Clear primary language when releasing org
- Strip out NULL characters when serializing JsonAsTextField values
- Override language names and ensure overridden names are used for searching and sorting

## v6.3.33

- Update components and flow editor to common versions
- Allow external ticketers to use agent ui, add footer to tickets

## v6.3.32

- Release import batches when releasing contact imports

## v6.3.31

- Fix serializing JSON to send to mailroom when it includes decimals (see the sketch below)

## v6.3.30

- Restrict org languages to ISO-639-1 plus explicit inclusions
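v6.3.31 above is the classic Decimal problem: json.dumps raises TypeError on Decimal values coming out of numeric database columns. A minimal sketch of one fix; the encoder name is illustrative.

```python
import json
from decimal import Decimal

class DecimalSafeEncoder(json.JSONEncoder):
    """Serialize Decimal values (e.g. from numeric columns) as plain numbers."""

    def default(self, obj):
        if isinstance(obj, Decimal):
            return float(obj)  # or str(obj) to avoid any precision loss
        return super().default(obj)

payload = {"revenue": Decimal("12.50")}
body = json.dumps(payload, cls=DecimalSafeEncoder)  # '{"revenue": 12.5}'
```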
## v6.3.29

- Move Twilio, Plivo and Vonage number searching views into their respective channel packages
- Optimize query for fetching contacts with only closed tickets
- Release contact imports when releasing groups
- Properly skip anonymous user for analytics

## v6.3.28

- Remove simplejson
- Update to latest vonage client and fix retries

## v6.3.27

- Restore menu-2 icon used by org choose menu

## v6.3.26

- Make groups searchable on contact update page

## v6.3.25

- Add beta-gated tickets view

## v6.3.24

- Change analytics.track to expect a user argument
- Add org released_on, use when doing full releases
- Ignore anon user in analytics

## v6.3.23

- Clean up country codes used by various channel types

## v6.3.22

- Show results in flow order

## v6.3.21

- Fix Javascript error on two factor formax
- Beta-gate chatbase integration for now

## v6.3.20

- Rework DT One and Chatbase into a new integrations framework
- Expose Org.language as default language for new users on org edit form

## v6.3.19

- Add support for Zenvia SMS
- Cleanup unused parsing code on org model
- Fix flow update forms to show correct fields based on flow type
- Tweak JSONAsTextField to allow underlying DB column to be migrated to JSONB
- Add controls to import preview page for selecting existing groups etc

## v6.3.18

- Fix template names

## v6.3.17

- Fix font reference in scss

## v6.3.16

- Add group name field to contact imports so that it can be customized
- Rename Nexmo to Vonage, update icon
- Merge the two used icomoon sets into one and delete unused one
- Cleanup problems in org view templates

## v6.3.15

- Revert wording changes when orgs don't have email settings to clarify that we do send
- Fix wording of Results link in editor

## v6.3.14

- Fix locale files
- Fix SMTP server settings views to explain that we don't send emails if you don't have a config
- Add API endpoint to fetch tickets filterable by contact

## v6.3.13

- Clarify terms for exports vs downloads
- Fix rendering of airtime events in contact history
- Add flow import and flow export links in the flows tab

## v6.3.12

- Update to latest flow-editor
- Cleanup unused dates methods
- Update markdown dependency
- Expose exclude_active on flow start read API
- Support 3 digit short codes on Jasmin channel type
- Add support for YYYY-MM-DD date format
- Update DT One support to collect api key and secret to use with new API
- Update parent remaining credits
- Release broadcasts properly

## v6.3.11

- Fix redirect after submitting Start In Flow modal

## v6.3.10

- Add support to exclude active contacts in other flows when starting a flow on API
- Remove unsupported channel field on broadcast create API endpoint
- Add Start Flow modal to contact read page
- Fix lock file being out of sync with pyproject

## v6.3.9

- Revert update to use latest API version to get WA templates
- Fix setting Zenvia webhooks
- Update Django and Django REST Framework

## v6.3.8

- Convert to poetry

## v6.3.6

- Update pt_BR translation
- Update to use latest API version to get WA templates
- Display failed on flow results charts, more translations
- Zenvia WhatsApp

## v6.3.5

- Fix broken flow results charts

## v6.3.4

- Update to latest celery 4.x

## v6.3.2

- Support resetting the org limits to the default settings by clearing the form field
- Update redis client to latest v3.5.3
- Fix manage accounts form blowing up when new user has been created in background

## v6.3.1

- Add support for runs with exit_type=F
- Support customization for org limits

## v6.3.0

- Update stable versions and coverage badge link
- Style Outbox 
broadcasts with megaphone icons and use includes for other places we render contacts and groups
- Fix spacing on outbox view
- Add discord channel type

## v6.2.4

- Update Portuguese translation
- Update to floweditor v1.13.5

## v6.2.3

- Update to latest floweditor v1.13.4

## v6.2.2

- Update to flow editor v1.13.3
- Update Spanish translation
- Disable old Zenvia channel type
- Fix styles on fields list

## v6.2.1

- Return registration details to Android if they have the same UUID
- Add spacing between individual channel log events
- Fix external channel claim form
- Do not track Android channels creation by anon user

## v6.2.0

- Update translations for es, fr and pt-BR
- Fix rendering of pending broadcasts in outbox view

## v6.1.48

- Update editor with dial router changes
- Fix resthook formax validation

## v6.1.47

- Change synched to synced
- Update to smartmin 2.3.5
- Require recent authentication to view backup tokens

## v6.1.46

- Update to smartmin 2.3.5
- Fix handling of attempts to sync old unclaimed channels
- Add view to list all possible channel types
- Fix rendering of nameless channels

## v6.1.45

- Open up 2FA to all users
- Do not allow duplicate invites
- Never respond with registration commands in sync handler

## v6.1.44

- Enforce time limit between login and two factor verification
- Prevent inviting existing users
- Add disabled textinputs and better expression selection on selects
- Create failed login records when users enter incorrect backup tokens too many times
- Logout user to force login to accept invite and require invite email account exactly

## v6.1.43

- Backup tokens can only be used once
- Add new 2FA management views

## v6.1.42

- Use Twilio API to determine capabilities of new Twilio channels
- Fix result pages not loading for users using the Spanish interface

## v6.1.41

- Remove no longer used permissions
- Override login view to redirect to new views for two-factor authentication
- Reduce recent export window to 4 hours
- Change message campaign events to use background flows

## v6.1.40

- Remove UserSettings.tel and add UserSettings.last_auth_on

## v6.1.39

- Increase max len of URN fields on airtime transfers
- Add toggle to display manual flow starts only
- Cleanup 2FA models

## v6.1.38

- Update flow editor to 1.12.10 with failsafe errors
- Make validation of external channel URLs disallow private and link local hosts
- Cleanup middleware used to set org, timezone and language

## v6.1.37

- Update components and editor to latest versions
- Switch to microsecond accuracy timestamps
- Switch to default_storage for export assets

## v6.1.33

- Tweaks to how we generate contact histories

## v6.1.32

- Mute invalid host errors
- Add migration to alter m2ms to use bigints
- Drop no longer used database function
- Switch to big id for msgs and channel logs

## v6.1.31

- Add management command to check sentry
- Remove unused context processor and unused code from org_perms

## v6.1.29

- Rework contact history so that rendering as events happens in view and we also expose a JSON version

## v6.1.26

- Upgrade urllib3

## v6.1.25

- Update to elastic search v7

## v6.1.24

- Broadcast events in history should be white like message events

## v6.1.23

- Add index on flow start by start type
- Allow only deleting msg folders without active children labels
- Use engine events (with some extra properties) for 
msgs in contact history

## v6.1.22

- Fix API serialization of background flow type
- Allow background flows to be used in scheduled triggers
- Update pip-tools

## v6.1.21

- Configure editor and components to use completions files in current language

## v6.1.20

- Update to latest floweditor and temba-components

## v6.1.19

- Update to floweditor v1.12.6
- Fix deleting classifiers

## v6.1.18

- Add support for background flows

## v6.1.17

- Update to flow editor v1.12.5
- Fix importing dependencies when it's a clone in the same workspace
- Allow aliases to be reused on boundaries with different parent
- Increase max length on external channels to be configurable up to 6400 chars
- Fix contact export warning for existing export

## v6.1.16

- Update to latest flow editor 1.12.3
- Allow staff users to use the org chooser

## v6.1.15

- Add constraint to check URN identity matches scheme and path (see the sketch below)
- Add non-empty constraint for URN scheme and path
- Fix contact list pagination with searches
- Show query on list page for smart groups

## v6.1.14

- Change template translations to be TEXT
- Set global email timeout, fixes rapidpro #1345
- Update tel parsing to match gocommon, fixing how we currently accept local US numbers

## v6.1.13

- Bump temba-components to v0.8.11

## v6.1.12

- Un-beta-gate Rocket.Chat channels

## v6.1.10

- Login summary on org home page should include agents
- Rework manage accounts UI to include agents

## v6.1.9

- Fix deleted flow dependency preventing global deletion
- Cache lookups of auth.Group instances

## v6.1.8

- For field columns in imports, only match against user fields
- Add agent role and cleanup code around org roles

## v6.1.7

- Wire table listeners on pjax reload
- Update domain from swag.textit.com to whatsapp.textit.com
- Add internal ticketer type for BETA users
- Inner scrolling on contact list page
- Improve styles for recipient lists

## v6.1.6

- Trim our start runs 1,000 at a time and by id
- Increase global max value length to 10000 and fix UI to be more consistent with fields

## v6.1.5

- Share modals on globals list, truncate values
- Squash migrations

## v6.1.4

- Add security settings file
- Fix intent selection on split by intent
- Add empty migrations for squashing in next release

## v6.1.3

- Fix intent selection on split by intent
- Update callback URL for textit whatsapp
- Use Django password validators

## v6.1.2

- Add TextIt WhatsApp channel type

## v6.1.1

- Fix contact exports when orgs have orphaned URNs in schemes they don't currently use

## v6.1.0

- Hide editor language dialog blurb until needed to prevent flashing
- Fix broken flows list page if org has no flows
- Allow underscores in global names
- Improve calculating of URN columns for exports so tests don't break every time we add new URN schemes
- Make instruction lists on channel claim pages more consistent

## v6.0.8

- Editor fix for split by intents
- Add empty migrations for squashing in next release

## v6.0.7

- Fix choose org page
- Fix recipient search
- Fix run deletion

## v6.0.6

- Fix for textarea init

## v6.0.5

- Adjust contact icon color in recipient lists

## v6.0.4

- Fix recipients contacts and urns UI labels
- Fix flow starts log page pagination
- Update temba-components and flow-editor to common versions
- Fix flow label delete modal
- Fix global delete modal
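The URN constraints from v6.1.15 above can be expressed as database-level checks that the identity column is exactly scheme:path and that neither part is empty. A minimal sketch with simplified fields; the real ContactURN model has more columns and the constraint names are illustrative.

```python
from django.db import models
from django.db.models import Q, Value
from django.db.models.functions import Concat

class ContactURN(models.Model):
    scheme = models.CharField(max_length=128)
    path = models.CharField(max_length=255)
    identity = models.CharField(max_length=255)

    class Meta:
        constraints = [
            # identity must equal "<scheme>:<path>", e.g. "tel:+250788123123"
            models.CheckConstraint(
                check=Q(identity=Concat("scheme", Value(":"), "path")),
                name="urns_identity_matches_scheme_and_path",
            ),
            # scheme and path may never be empty
            models.CheckConstraint(
                check=~Q(scheme="") & ~Q(path=""),
                name="urns_scheme_and_path_non_empty",
            ),
        ]
```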
## v6.0.3

- Update to components v0.8.6, bugfix release
- Handle CSV imports in encodings other than UTF8

## v6.0.2

- Fix broken ticket re-open button
- Missing updated Fr MO file from previous merge
- Apply translations in fr

## v6.0.1

- Fix orgs being suspended due to invalid topup cache
- Set uses_topups on new orgs based on whether our plan is the TOPUP_PLAN
- Fix validation issues on trigger update form
- Fix hover cursor in lists for viewers
- Action button alignment on archived messages
- Fix flow table header for viewers
- Fix tests for channel deletion
- Fix redirects for channel and ticketer deletion
- Fix dialog when deleting channels with dependencies
- Match headers and contact fields with labels as well as keys during contact imports

## v6.0.0

- Add Rocket.Chat ticketer to test database

## v5.7.91

- Add Rocket.Chat ticketers

## v5.7.90

- Update rocket.chat icon in correct font

## v5.7.89

- Improve Rocket.Chat claim page
- Add Rocket.Chat icon

## v5.7.87

- Cleanup Rocket.Chat UI

## v5.7.86

- Add RocketChat channels (beta-only for now)

## v5.7.85

- Add back jquery-migrate and remove debug

## v5.7.84

- Remove select2, coffeescript, jquery plugins

## v5.7.83

- Fix broken import link on empty contacts page
- Use consistent approach for limits on org
- Globals UI should limit creation of globals to org limit
- Fix archives list styles and add tabs for message and run archives
- Restyle the Facebook app channel claim pages
- Switch to use FBA type by default

## v5.7.82

- Don't blow up if import contains invalid URNs but pass values on to mailroom
- Update to version of editor with some small styling tweaks
- Include occurred_on with mo_miss events queued to mailroom
- Adjust Twilio connect to redirect properly to the original claim page
- Remove no longer used FlowRun.timeout_on and drop two unused indexes
- Cleanup more localized strings with trimmed
- Fix 404 error in channel list

## v5.7.81

- Add page title to brand so that it's configurable
- Don't send alert emails for orgs that aren't using topups
- Consider timezone when inferring org default country and display on import create page
- Add page titles to fields and flows
- Allow changing EX channels role on UI

## v5.7.80

- Add contact last seen on to list contacts views
- Cleanup channel model fields
- Add charcount to send message dialog
- Show channel logs link for receive only channels
- Fix export flow page styles
- Allow searching for countries on channel claim views

## v5.7.79

- Rework imports to allow importing multiple URNs of same scheme
- Cleanup no longer used URN related functionality
- Show contact last seen on on contact read page

## v5.7.78

- Clean up models fields in contacts app

## v5.7.77

- Fix styling on the API explorer page
- Fix list page selection for viewers
- Move contact field type constants to ContactField class
- Allow brand to be set by env variable

## v5.7.76

- Drop support for migrating legacy expressions on API endpoints
- Fix imports blowing up when header is numerical
- Fix 11.4 flow migration when given broken send action
- Drop RuleSet and ActionSet models

## v5.7.75

- Last tweaks before RuleSet and ActionSet can be dropped
- Contact id treatment for details
- Update components to ship ajax header and use it in language endpoint
- Remove no longer needed legacy editor completion

## v5.7.74

- Remove legacy flow code
- WA channel tokens refresh catch 
errors for each channel independently

## v5.7.73

- Make flows searchable and clickable on triggers
- Make flows searchable on edit campaign event

## v5.7.72

- Fix editor whatsapp templates, refresh whatsapp channel pages
- Move omnibox module into temba.contacts.search

## v5.7.71

- Remove legacy contact searching
- Remove code for dynamic group reevaluation and campaign event scheduling

## v5.7.70

- Fix pdf selection

## v5.7.69

- Validate language codes passed to contact API endpoint
- Don't actually create a broadcast if sending to node but nobody is there
- Update to latest floweditor

## v5.7.67

- Fix globals endpoint so name is required
- Filter by is_active when updating fields on API endpoint

## v5.7.66

- Replace remaining Contact.get_or_create calls with mailroom's resolve endpoint

## v5.7.65

- URN lookups on the contact API endpoint should be normalized with org country
- Archiving a campaign should only recreate events

## v5.7.64

- Don't create contacts and URNs for broadcasts but instead defer the raw URNs to mailroom

## v5.7.63

- Validate that import files don't contain duplicate UUIDs or URNs

## v5.7.62

- Update version of editor and components
- Upload imports to use UUID based path
- Fix issue where all keywords couldn't be removed from a flow

## v5.7.61

- Remove old editor, redirect editor_next to editor

## v5.7.60

- Fix contact imports from CSV files
- Tweaks to import UI

## v5.7.59

- Imports 2.0

## v5.7.55

- Use v13 flow as example on definitions endpoint docs
- Add URNs field to FlowStart and pass to mailroom so that it creates contacts

## v5.7.54

- Update editor to get support for expressions in add to group actions
- Remove unused localized text on Msg and Broadcast

## v5.7.52

- Migrations and models for new imports

## v5.7.51

- Add plan_start, calculate active contacts in plan period, add to OrgActivity
- Tweak how mailroom_db creates extra group contacts
- Update to latest django-hamlpy

## v5.7.50

- Optimizations for orgs with many contact fields

## v5.7.49

- Update plan_end when suspending topup orgs
- Suspend topup orgs that have no active credits
- Show suspension header when an org is suspended
- Tweak external channel config styling
- Fix styles for button on WA config page

## v5.7.48

- Fix button style for channel extra links
- Skip components missing text for WA templates sync
- Editors should have API tokens

## v5.7.47

- Queue mailroom task to schedule campaign events outside of import transaction
- Fix margin on fields warning alert

## v5.7.46

- Use mailroom task for scheduling of campaign events

## v5.7.45

- Make sure form.\_errors is a list

## v5.7.44

- Add index to enforce uniqueness for event fires

## v5.7.43

- Fix migration

## v5.7.42

- Bump smartmin to 2.2.3
- Fix attachment download and pdf links

## v5.7.41

- Fix messages to send without topup, and migrations
- No topup transfers on suborgs, show contacts, not credits

## v5.7.40

- Invalid language codes passed to contact API endpoint should be ignored and logged for now

## v5.7.39

- Update widget focus and borders on legacy editor
- Show global form errors and pre-form on modax template

## v5.7.38

- Add alpha sort and search to results view
- Searchable contact fields and wired listeners after group changes
- Force policy redirect on welcome page, honor follow-on navigation redirect
- Use 
mailroom for contact creation in API and mailroom_db command
- Adjust styling for contact import scenarios
- Show address when it doesn't match channel name

## v5.7.37

- Add topup button to topup manage page

## v5.7.36

- Fix deleting ticketers

## v5.7.35

- Zendesk file view needs to be csrf exempt
- Use mailroom to create contacts from UI

## v5.7.34

- Add view to handle file URL callbacks from Zendesk

## v5.7.33

- Fix delete button on archived contacts page
- Don't allow saving queries that aren't supported as smart groups
- Delete no longer used contacts/fields.py
- Fix contacts reappearing in ES searches after being modified by a bulk action
- Adjust pjax block for contact import block

## v5.7.32

- Modal max-height in vh to not obscure buttons

## v5.7.31

- Add padding for p tags on policies

## v5.7.30

- Add content guideline policy option, update styling a bit

## v5.7.29

- Sitewide refresh of styles using Tailwind

## v5.7.28

- Update to flow editor v1.9.15

## v5.7.27

- Update to flow editor v1.9.14
- Add support for last_seen_on in legacy search code

## v5.7.26

- Handle large deletes of contacts in background task

## v5.7.25

- Fix bulk actions against querysets from ES searches
- Fix bulk action permissions on contact views

## v5.7.24

- Rename existing 'archive' contact action in API to 'archive_messages'
- Allow deleting of all contacts from Archived view

## v5.7.23

- Rename All Contacts to Active
- Add UI for archiving, restoring and deleting contacts

## v5.7.22

- Bump version of mailroom and indexer used for tests
- Drop no longer used is_blocked and is_stopped fields

## v5.7.21

- Add missing migration from last rev

## v5.7.20

- Add missing migration

## v5.7.19

- Make contact.is_stopped and is_blocked nullable and stop writing

## v5.7.18

- Update sys group trigger to handle archiving

## v5.7.17

- Migration to add Archived sys group to all orgs

## v5.7.16

- Update to flow editor 1.9.11
- Update database triggers to use contact status instead of is_blocked or is_stopped
- Make contact.status non-null
- Create new archived system group for new orgs

## v5.7.15

- Add nag warning to legacy editor

## v5.7.14

- Migration to backfill contact status

## v5.7.13

- Enable channelback files for Zendesk ticketers
- Set status as active for new contacts
- Add new status field to contact
- Fix legacy editor by putting html-tag block back
- Change the label for CM channel claim

## v5.7.12

- Fix imports that match by UUID
- Fix Nexmo search numbers and claim number
- Use Django language code on html tag
- Add support for ClickMobile channel type

## v5.7.11

- Fix creating of campaign events based on last_seen_on
- Tweak msg_console so it can include sent messages which are not replies
- Fix mailroom_db command
- Expose last_seen_on on contact API endpoint

## v5.7.10

- Update floweditor to 1.9.10
- Add Last Seen On as a system field so it can be used in campaigns
- Tweak search_archives command to allow JSONL output

## v5.7.9

- Fix reading of S3 event streams
- Migration to populate contact.last_seen_on from msg archives (see the sketch below)

## v5.7.8

- Add plan_end field to Orgs

## v5.7.7

- Add search archives management command

## v5.7.6

- Optimizations to migration to backfill last_seen_on
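A toy sketch of the shape of the backfill in v5.7.9 above, reading from the live msgs table rather than S3 archives for brevity; the direction code "I" for incoming messages is an assumption about the schema.

```python
from django.db.models import Max

def backfill_last_seen_on(Contact, Msg, batch_size=1000):
    """Toy backfill of contact.last_seen_on from each contact's newest incoming msg."""
    contact_ids = list(
        Contact.objects.filter(last_seen_on=None).values_list("id", flat=True)
    )
    for i in range(0, len(contact_ids), batch_size):
        batch = contact_ids[i : i + batch_size]
        latest = (
            Msg.objects.filter(contact_id__in=batch, direction="I")  # "I" assumed
            .values("contact_id")
            .annotate(last=Max("created_on"))
        )
        for row in latest:
            Contact.objects.filter(id=row["contact_id"]).update(last_seen_on=row["last"])
```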
## v5.7.5

- Add migration to populate contact.last_seen_on
- Update to latest temba-components with support for refresh work

## v5.7.4

- Use new metadata field from mailroom searching endpoints
- Make sure we have only one active trigger when importing flows
- Fix org selector and header text alignment when editor is open

## v5.7.3

- Add contact.last_seen_on
- Bump floweditor to v1.9.9

## v5.7.2

- Add error messages for all error codes from mailroom query parsing
- Fix org manage quick searches
- Always use mailroom for static group changes

## v5.7.1

- Add session history field to flowstarts
- Have mailroom reset URNs after contact creation to ensure order is correct

## v5.7.0

- Add start_type and created_by to queued flow starts
- New mixin for list views with bulk actions
- Update some dependencies to work with Python 3.8 and MacOS

## v5.6.5

- Set the tps options for Twilio based on country and number type
- Fix wit.ai classifiers and double logging of errors on all classifier types

## v5.6.3

- Add variables for nav colors

## v5.6.2

- Fix failing to manage logins when we are logged into the same org

## v5.6.1

- Instead of dates, keep track of seen runs when excluding archived runs from exports

## v5.6.0

- 5.6.0 Release Candidate

## v5.5.78

- Improve the visuals and guides on the FBA claim page
- Block flow starts and broadcasts for suspended orgs
- Add a way to suspend orgs from org manage page

## v5.5.77

- Subscribe to the Facebook app for webhook events

## v5.5.76

- Add Facebook App channel type

## v5.5.75

- Always update both language and country if different

## v5.5.74

- Allow augmentation of templates with new country

## v5.5.73

- Add support for urn property in search queries
- Add support for uuid in search queries
- Set country on WhatsApp templates syncing and add more supported languages
- Add country on TemplateTranslation

## v5.5.72

- Use modifiers for field value updates

## v5.5.71

- Fix to allow all orgs to import flows

## v5.5.70

- Use modifiers and mailroom to update contact URNs

## v5.5.69

- Refresh contact after letting mailroom make changes
- Contact API endpoint can't call mailroom from within a transaction

## v5.5.68

- Fix contact update view
- Allow multi-user / multi-org to be set on each org
- Fix additional urls import

## v5.5.66

- Implement Contact.update_static_groups using modifiers
- Consistent use of account/login/workspace

## v5.5.64

- Fix editor

## v5.5.63

- Make new org fields non-null and remove no longer needed legacy method

## v5.5.62

- Rename whitelisted to verified
- Add migration to populate new org fields

## v5.5.61

- Add new boolean fields to org for suspended, flagged and uses_topups and remove no longer used plan stuff

## v5.5.60

- Move webhook log button to flow list page
- Add confirmation dialog to handle flow language change

## v5.5.59

- Update to floweditor v1.9.8

## v5.5.58

- Update to floweditor 1.9.7
- Remove BETA gating for tickets

## v5.5.57

- Restore logic for when dashboard and android nav icons should appear
- Add translations in ru and fr

## v5.5.56

- Improvements to ticketer connect views
- Still need to allow word only OSM ids

## v5.5.55

- Fix boundaries URL regex to accept more numbers

## v5.5.54

- Add index for mailroom looking up tickets by ticketer and external ID
- Make it easier to differentiate open and closed tickets
- Update to 
temba-components 0.1.7 for chrome textinput fix

## v5.5.53

- Add indexes on HTTP log views
- Simplify HTTP log views for different types whilst giving each type its own permission

## v5.5.52

- More ticket view tweaks

## v5.5.51

- Tweak zendesk manifest view

## v5.5.50

- Tweak zendesk mailroom URLs

## v5.5.49

- Store brand name in mailgun ticketer config to use in emails from mailroom

## v5.5.48

- Defer to mailroom for ticket closing and reopening

## v5.5.47

- Beta-gated views for Mailgun and Zendesk ticketers

## v5.5.46

- Bump black version
- Fix layering of menu with simulator

## v5.5.45

- Increase the template name field to accept up to 512 characters
- Make sending of Stripe receipts optional
- Add OrgActivity model that tracks contacts, active contacts, incoming and outgoing messages

## v5.5.43

- Fix JS escaping on channel log page

## v5.5.42

- Remove csrf exemption for views that don't need it (all our pjax includes csrf)
- Escape translations in JS literals
- Upgrade FB graph API to 3.3

## v5.5.41

- Use branding keys when picking which orgs to show on manage

## v5.5.40

- Allow branding to have aliases
- Fix bug of removing URNs when updating fields looking up by URN

## v5.5.39

- Update to floweditor 1.9.6
- New task to track daily msgs per user for analytics
- Add support for Russian as a UI language
- Models and editor API endpoint for tickets
- Skip duplicate relayer call events

## v5.5.38

- Update to flow editor 1.9.5
- Allow custom TS send URLs

## v5.5.37

- Remove all uses of \_blank frame name
- Strip exif data from images

## v5.5.36

- Better tracking of channel creation and triggers, track simulation
- Do not use font checkboxes for contact import extra fields

## v5.5.35

- Revert Segment.io identify change to stay consistent with other tools

## v5.5.34

- Identify users in Segment.io using best practice of user id, not email

## v5.5.33

- Add context processor to stuff analytics keys into request context
- Restrict 2FA functionality to BETA users

## v5.5.32

- Add basic 2FA support

## v5.5.31

- Update to latest smartmin

## v5.5.30

- Add new flow start type to record that flow was started by a Zapier API call
- Contact bulk actions endpoint should error if passed no contacts
- Remove mentioning the countries for AT claim section
- Add Telesom channel type

## v5.5.29

- Fix trimming flow starts with start counts

## v5.5.28

- Update Africa's Talking supported countries

## v5.5.27

- Remove temporary NOOP celery tasks
- Drop Contact.is_paused field
- Editor 1.9.4, better modal centering

## v5.5.26

- Add NOOP versions of renamed celery tasks to avoid problems during deploy

## v5.5.23

- Remove default value on Contact.is_paused so it can be dropped
- Trim completed mailroom created flow starts
- Update flow starts API endpoint to only show user created flow starts and add index

## v5.5.22

- Add nullable contact.is_paused field
- Display run count on flow start list page

## v5.5.21

- Optimize flow start list page with DB prefetching
- Indicate on flow start list page where start was created by an API call

## v5.5.20

- Use actual PO library to check for msgid differences
- Migration to backfill FlowStart.start_type
- Log error of WA channel failing to sync templates

## v5.5.19

- Add FlowStart.start_type
- Ensure flow starts created via the API are only sent to mailroom after the open transaction is committed (see the sketch below)
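The v5.5.19 fix above is the standard Django pattern for deferring work that must only happen if the surrounding transaction commits. A minimal sketch; FlowStart usage is simplified and queue_flow_start_to_mailroom is a stand-in name.

```python
from functools import partial

from django.db import transaction

def create_flow_start(org, flow, contacts):
    """Create a FlowStart and hand it to mailroom only after commit."""
    with transaction.atomic():
        start = FlowStart.objects.create(org=org, flow=flow)
        start.contacts.add(*contacts)

        # if the transaction rolls back, mailroom never sees a dangling start
        transaction.on_commit(partial(queue_flow_start_to_mailroom, start))
    return start
```

Without on_commit, mailroom could pick up the start id before the row is visible to other connections, or after a rollback has removed it entirely.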
## v5.5.18

- Add flow start log page

## v5.5.17

- Add index to list manually created flow starts
- Make FlowStart.org and modified_on non-NULL
- Move contact modification for name and language to be done by mailroom

## v5.5.16

- Bower no longer supported for package installs
- Migration to backfill FlowStart.org and modified_on

## v5.5.15

- Update to flow-editor 1.9.2, security patches

## v5.5.14

- Ensure IVR retry is preserved on new revisions
- Import flows for mailroom test db as v13
- Make UUID generation fully mockable
- Add run UUID on flow results exports
- Drop unused fields on FlowStart and add org

## v5.5.13

- Stop using FlowStart.modified_on so that it can be removed
- Disable syncing templates with variables in headers and footers

## v5.5.12

- Import and export of PO files

## v5.5.10

- Bump up the simulator when popped so it fits on more screens
- Editor performance improvements

## v5.5.8

- Update help text on contact edit dialog
- Add prometheus endpoint config on account page
- Fix boundary aliases filtering by org

## v5.5.7

- Fix open modal check on pjax refresh
- Show warnings on contact field page when org is approaching the limit and has hit the limit

## v5.5.6

- Temporarily disable template requests to FB when claiming WA channels

## v5.5.5

- Newest smartmin with BoM fix

## v5.5.4

- Show better summary of schedules on trigger list page
- Fix display of trigger on contact group delete modal

## v5.5.3

- Update to floweditor 1.8.9
- Move EX constants to channel type package
- Remove unused deps and address npm security warnings
- Add 18 hours as flow expiration option
- FlowCRUDL.Revisions should return validation errors from engine as detail field
- Allow setting authentication header on External channels
- Add normalize contact tels task
- Drop full resolution geometry, only keep simplified
- Add attachments columns to flow results messages sheet

## v5.5.0

- Increase the WA channels tps to 45 by default

## v5.4.13

- Fix URL related test errors

## v5.4.12

- Don't allow localhost for URL fields

## v5.4.11

- Make sure external channel URLs are external

## v5.4.10

- Complete FR translations
- Update to floweditor 1.8.8

## v5.4.9

- Fix submitting API explorer requests where there is no editor for query part
- Lockdown redirects on exports
- Add more detailed fresh chat instructions

## v5.4.8

- Find and fix more cases of not filtering by org

## v5.4.7

- Fix org filtering on updates to globals
- Fix campaign event update view not filtering by event org
- Fix error in API contact references when passed a JSON number
- Replace Whatsapp by WhatsApp

## v5.4.6

- Merge pull request #2718 from nyaruka/fe187

## v5.4.4

- Fix various filtering issues

## v5.4.3

- Update sample flow test

## v5.4.2

- Remove use of webhook where not appropriate

## v5.4.1

- Update sample flows to use @webhook instead of @legacy_extra

## v5.4.0

- Add API endpoint to update Globals
- Keep latest sync event for Android channels when trimming

## v5.3.64

- Add support for Twilio WhatsApp channel type

## v5.3.63

- Add pre_deploy command to check imports/exports
- Fix link to android APK downloads on claim page

## v5.3.62

- Temporarily disable resume imports task

## v5.3.61

- Fix text of save as group dialog
- Add support to restart export tasks that might have been 
stopped by deploy

## v5.3.60

- Update to latest mailroom
- Add urns to runs API endpoint

## v5.3.59

- Update to latest mailroom which returns allow_as_group from query parsing
- Don't create missing contact fields on flow save

## v5.3.57

- Update flow editor 1.7.16
- Fix translations on external channel claim page
- Add tabs to toggle between full flow event history and summary of messages
- Increase the max height on the flow results export modal dialog

## v5.3.56

- Add params to flow starts API
- Change name of org_id param in calls to flow/inspect

## v5.3.55

- Allow editing of allow_international on channel update forms
- Use consistent format for datetimes like created_on on contact list page

## v5.3.54

- Hide loader on start flow dialog when there are no channels

## v5.3.53

- Fix creation of Android channels

## v5.3.52

- Convert Android to dynamic channel type

## v5.3.51

- Update to floweditor 1.7.15
- Add python script to do all CI required formatting and locale rebuilding
- Use mailroom for query parsing for contact exports
- Fix text positioning on list pages
- Fix delete contact group modal buttons when blocked by dependencies
- Completion with upper case functions

## v5.3.50

- Migration to set allow_international=true in configs of existing tel channels
- Remove no longer used flow definition caching stuff

## v5.3.49

- Use realistic phone numbers in mailroom test db
- Remove contact filtering from flow results page
- Add migration to populate Flow.template_dependencies

## v5.3.48

- Use mailroom searching for omnibox results

## v5.3.47

- Add template_dependencies m2m

## v5.3.46

- Do not subject requests to the API with sessions to rate limiting (see the sketch below)
- Migration to convert flow dependencies metadata to new format
- Update description on the flow results export to be clear

## v5.3.45

- Fix deletion of orgs and locations so that aliases are properly deleted
- Remove syntax highlighting in API explorer as it can't handle big responses
- Use new dependencies format from mailroom

## v5.3.44

- Dynamic group creation / reevaluation through Mailroom

## v5.3.43

- Update to latest mailroom

## v5.3.42

- Fix actions on blocked contact list page

## v5.3.41

- Disable simulation for archived flows
- Fix query explosion on Android channel alerts

## v5.3.40

- Add subflow parameters to editor

## v5.3.39

- Rework migration code so new flows are migrated too

## v5.3.38

- Use mailroom for contact searches, contact list pages and flow starts via search

## v5.3.35

- Rebuild components

## v5.3.34

- Update to flow editor 1.7.13
- Don't include 'version' in current definitions
- Migrate imports of flows to new spec by default

## v5.3.30

- Exclude inactive template translations from API endpoint

## v5.3.29

- Fix edge case for default alias dialog
- Add sending back to contact list page
- Save parent result refs in flow metadata
- Change name BotHub to Bothub

## v5.3.28

- Remove auto-now on modified_on on FlowRun

## v5.3.27

- Update to floweditor 1.7.9
- Warn users if starting for facebook without a topic

## v5.3.26

- Allow arbitrary numbers when sending messages
- Componentized message sending
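v5.3.46 above exempts session-authenticated (browser) requests from API rate limiting. A minimal sketch using Django REST Framework's throttle hook; the class name and the exact session test are illustrative.

```python
from rest_framework.throttling import ScopedRateThrottle

class APITokenRateThrottle(ScopedRateThrottle):
    """Rate limit token-based API clients, but skip requests made with a web session."""

    def allow_request(self, request, view):
        # requests from the web UI carry a non-empty session; don't throttle those
        if request.user.is_authenticated and not request.session.is_empty():
            return True
        return super().allow_request(request, view)
```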
+
+## v5.3.24
+
+- Pass version to mailroom migrate endpoint
+- Fix saving on alias editor
+- Support the whatsapp templates HEADER and FOOTER components
+- Write HTTP log for errors in connection
+
+## v5.3.23
+
+- Add support for whatsapp templates with headers and footers
+- Make sure we have one posterizer form and we bind one click event handler for posterize links
+
+## v5.3.22
+
+- Convert add/edit campaign event to components
+
+## v5.3.21
+
+- Add UI for managing globals
+
+## v5.3.16
+
+- Update to flow editor v1.7.7
+
+## v5.3.13
+
+- Update to floweditor v1.7.5
+- Re-add msg_console management command with new support for mailroom
+- Cleanup some usages of trans/blocktrans
+
+## v5.3.12
+
+- Add error and failure events to contact history
+- Use form components on campaign create/update
+
+## v5.3.11
+
+- Migrate sample flows to new editor
+- Localize URNs in API using org country
+- Write HTTPLogs for Whatsapp template syncing
+- Remove Broadcast recipient_count field
+
+## v5.3.10
+
+- Add read API endpoint for globals
+
+## v5.3.9
+
+- Add trimming task for flow revisions
+- Add models for globals support
+- Add FreshChat channel support
+
+## v5.3.8
+
+- Make sure imported flows are unarchived
+- Validate we do not have a caller on a channel before adding a new one
+
+## v5.3.7
+
+- Release URNs on Org release
+
+## v5.3.6
+
+- Release Channel sync events and alarms
+
+## v5.3.5
+
+- release Campaigns when releasing Orgs
+
+## v5.3.4
+
+- Release flow starts when releasing flows
+
+## v5.3.3
+
+- Add releasing to Classifiers and HTTPLogs
+
+## v5.3.2
+
+- Allow manual syncing of classifiers
+
+## v5.3.1
+
+- Update documentation for FB webhook events to subscribe to
+
+## v5.3.0
+
+- Fix DT One branding and add new icon
+- Fix validation problem on update schedule trigger form
+- Use brand when granting orgs, not host
+- Update contactsql parser to support same quotes escaping as goflow
+
+## v5.2.6
+
+- Change slug for Bothub classifier to 'bothub'
+
+## v5.2.5
+
+- Fix various Schedule trigger UI validation errors
+- Fix intermittently failing excel export tests
+- Add noop reverse in migration
+
+## v5.2.1
+
+- Fix order of Schedule migrations (thanks @matmsa27)
+
+## v5.2.0
+
+- Show date for broadcast schedules
+- Honor initial datetime on trigger schedule ui
+
+## v5.1.64
+
+- Update to flow editor version 1.7.3
+- Fix weekly buttons resetting on trigger schedule form validation
+- Validate schedule details on schedule trigger form
+- Show query editors in contact search
+- Add migration to fix schedules with None/NaN repeat_days_of_week values
+- Move IE9 shim into the main template header
+- Update README with final 5.0 versions
+
+## v5.1.63
+
+- Update to flow editor v1.7.2
+
+## v5.1.62
+
+- Validate repeat_days_of_week when updating schedules
+- Include airtime transfers in contact history
+
+## v5.1.61
+
+- Tweak styling on contact field list page
+- Send test email when the SMTP server config is set
+
+## v5.1.60
+
+- Add Bothub classifier type
+
+## v5.1.59
+
+- Update flow editor to version 1.7.0
+- Add Split by Intent action in flows
+- Update Send Airtime action for use with DTOne
+
+## v5.1.58
+
+- Unify max contact fields
+- Don't allow deletion of flow labels with children
+- Rename TransferTo to DTOne
+
+## v5.1.57
+
+- Check pg_dump version when creating dumps
+- Add missing block super in extra script blocks
+- Fix omnibox not actually being required on send message form
+- Rework airtime transfers to have separate http logs
+- Allow flow starts by query
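+
+A minimal sketch of the kind of check behind "Check pg_dump version when creating dumps" in v5.1.57 above — the `pg_dump --version` invocation is standard, the comparison policy is an assumption:
+
+```python
+import re
+import subprocess
+
+def pg_dump_version() -> tuple:
+    """Parses the local pg_dump version, e.g. 'pg_dump (PostgreSQL) 11.6' -> (11, 6)."""
+    out = subprocess.check_output(["pg_dump", "--version"]).decode()
+    match = re.search(r"(\d+)\.(\d+)", out)
+    if not match:
+        raise RuntimeError(f"can't parse pg_dump version from {out!r}")
+    return (int(match[1]), int(match[2]))
+
+# refuse to create a dump with a pg_dump older than the server we'll restore into
+assert pg_dump_version() >= (11, 0), "pg_dump is too old for the target PostgreSQL"
+```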
+
+## v5.1.55
+
+- Sync intents on classifier creation
+- Trim HTTP logs older than 3 days
+
+## v5.1.54
+
+- remove fragile AT links to configuration pages
+- Exclude hidden results from flow results page
+- Exclude results with names starting with \_ from exports
+
+## v5.1.53
+
+- Classifier models and views
+- HTTPLog models and views
+
+## v5.1.52
+
+- add prefetch to retry
+
+## v5.1.51
+
+- Add ThinQ Channel Type
+
+## v5.1.50
+
+- Fix contact history rendering of broadcast messages with null recipient count
+- Fix for start_session action in the editor
+
+## v5.1.49
+
+- Fire schedules in Mailroom instead of celery
+
+## v5.1.48
+
+- Rework contact history to include engine events
+
+## v5.1.47
+
+- Update to flow editor 1.6.20
+
+## v5.1.46
+
+- Rev Flow Editor v1.6.19
+
+## v5.1.45
+
+- Fix rendering of campaigns on export page
+- Fix ivr channel logs
+- Make FlowRun.status non-NULL
+- Make FlowSession.uuid unique and indexed
+
+## v5.1.44
+
+- Tidy up fields on flow activity models
+
+## v5.1.43
+
+- Fix styling on create flow dialog
+- Make user fields nullable on broadcasts
+- Populate repeat_minute_of_hour in data migration
+
+## v5.1.42
+
+- Update trigger update views to take into account new schedule fields
+
+## v5.1.41
+
+- Update docs on flow start extra to be accessible via @trigger
+- Change input selector to work cross-browser on send modal
+- Don't inner scroll for modax fetches
+
+## v5.1.40
+
+- Fix issues with web components in Microsoft Edge
+
+## v5.1.37
+
+- Cleanup Schedule class
+- Drop unused columns on FlowRun
+- Remove legacy engine code
+- Remove legacy broadcast and message sending code
+
+## v5.1.36
+
+- Temporarily disable compression for components JS
+
+## v5.1.33
+
+- Use new expressions for campaign message events, broadcasts and join group triggers
+- List contact fields with new expression syntax and fix how campaign dependencies are rendered
+
+## v5.1.28
+
+- Use mailroom to interrupt runs when archiving or releasing a flow
+- Re-organize legacy engine code
+- Initial library of web components
+
+## v5.1.27
+
+- Update to floweditor 1.6.13
+- Allow viewers to do GETs on some API endpoints
+
+## v5.1.26
+
+- Fix rendering of campaign and event names in UI
+- Move remaining channel client functionality into channel type packages
+- Remove unused asset server stuff
+
+## v5.1.25
+
+- Update floweditor to 1.6.12
+- Allow viewing of channel logs in anonymous orgs with URN values redacted
+
+## v5.1.24
+
+- Cleanup campaign model fields
+
+## v5.1.23
+
+- Really fix copying of flows with nameless has_group tests and add a test this time
+
+## v5.1.22
+
+- Remove trigger firing functionality (except schedule triggers) and drop unused fields on trigger
+
+## v5.1.21
+
+- Migration to backfill FlowRun.status
+
+## v5.1.20
+
+- Limit group fetching to active groups
+- Get rid of caching on org object as that's no longer needed
+- Fix importing/copying flows when flow has group dependency with no name
+
+## v5.1.19
+
+- Migration to add FlowRun.status
+
+## v5.1.18
+
+- Cleanup fields on FlowRun (single migration with no real SQL changes which can be faked)
+
+## v5.1.17
+
+- Remove all IVR flow running functionality which is now handled by mailroom
+
+## v5.1.15
+
+- Update to flow editor v1.6.11
+- Releasing Nexmo channel shouldn't blow up if application can't be deleted on Nexmo side
+
+## v5.1.14
+
+- Fix Nexmo IVR to work with mailroom
+- Add migration to populate session UUIDs
+- Update to Django 2.2
+- Send topup expiration emails to all org administrators
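+
+A sketch of the batched-backfill shape implied by "Add migration to populate session UUIDs" in v5.1.14 above — the app label, dependency and batch size are placeholders, not the real migration:
+
+```python
+import uuid
+
+from django.db import migrations
+
+def populate_uuids(apps, schema_editor):
+    FlowSession = apps.get_model("flows", "FlowSession")
+    while True:
+        batch = list(FlowSession.objects.filter(uuid=None).only("id")[:5000])
+        if not batch:
+            break
+        for session in batch:
+            session.uuid = str(uuid.uuid4())
+            session.save(update_fields=("uuid",))
+
+class Migration(migrations.Migration):
+    dependencies = [("flows", "0001_initial")]  # placeholder
+    operations = [migrations.RunPython(populate_uuids, migrations.RunPython.noop)]
+```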
+
+## v5.1.12
+
+- Drop ActionLog model
+- Switch to new editor as the default, use v1.6.10
+- Add query field to FlowStart
+
+## v5.1.11
+
+- Add FlowSession.uuid which is nullable for now
+- Update to floweditor 1.6.9, scrolling rules
+
+## v5.1.10
+
+- Update to flow editor 1.6.8, add completion config
+- Add FlowStart.parent_summary, start deprecating fields
+- Switch to bionic beaver for CI builds
+- Add trigger params access to ivr flow
+- Drop no longer used Broadcast.purged field
+
+## v5.1.9
+
+- Make Broadcast.purged nullable in preparation for dropping it
+
+## v5.1.8
+
+- Update floweditor to 1.6.7 and npm audit
+
+## v5.1.7
+
+- Remove unused IVR tasks
+- Simplify failed IVR call handling
+
+## v5.1.6
+
+- Fix format_number to be able to handle decimals with more digits than current context precision
+
+## v5.1.5
+
+- Update to flow editor 1.6.6
+
+## v5.1.4
+
+- Update to flow editor 1.6.5
+- Update Django to 2.1.10
+
+## v5.1.3
+
+- Update flow editor to 1.6.3
+
+## v5.1.2
+
+- Remove fields no longer needed by new engine
+- Trim sync events in a separate task
+
+## v5.1.1
+
+- Stop writing legacy engine fields and make them nullable
+- Remove no longer used send_broadcast_task and other unused sending code
+- Squash migrations into previously added dummy migrations
+
+## v5.1.0
+
+- Populate account sid and auth token on twilio callers when added
+- Disable legacy IVR tasks
+
+## v5.0.9
+
+- Add dummy migrations for all migrations to be created by squashing
+
+## v5.0.8
+
+- Update recommended versions in README
+- Fix API runs serializer when run doesn't have category (i.e. from save_run_result action)
+- Update to latest floweditor
+- Update search parser to convert timestamps into UTC
+
+## v5.0.7
+
+- Force a save when migrating flows
+
+## v5.0.6
+
+- Show search error if input is not a date
+- Group being imported into should be in state=INITIALIZING whilst being populated, and hide such groups in the UI
+- Only add initially changed files in post-commit hook
+- Fix to make sure the initial form data is properly shown on signup
+
+## v5.0.5
+
+- sync whatsapp templates with unsupported languages, show them as such
+
+## v5.0.4
+
+- Update to floweditor v1.5.15
+- Add pagination to outbox
+- Fix import of contact field when field exists with same name but different key
+- Fix (old) mac excel dates in imports (see the sketch below)
+
+## v5.0.3
+
+- Update flow editor to 1.5.14
+
+## v5.0.2
+
+- Remove reference to webhook API page which no longer exists
+- Update to flow-editor 1.5.12
+- Update some LS libs for security
+- Tweaks to migrate_to_version_11_1 to handle "base" as a lang key
+- Tweak old flow migrations to allow missing webhook_action and null ruleset labels
+
+## v5.0.1
+
+- Fix max length for WA claim facebook_access_token
+- Fix WhatsApp number formatting on contact page, add icon
+
+## v5.0.0
+
+- add validation of localized messages to Travis
+
+## v4.27.3
+
+- Make contact.is_test nullable
+- Migration to remove orphaned schedules and changes to prevent creating them in future
+- Migration to merge path counts from rules which are merged into a single exit in new engine
+
+## v4.27.2
+
+- fix broadcast API test
+
+## v4.27.1
+
+- temporarily increase throttling on broadcasts endpoint
+
+## v4.27.0
+
+- Cleanup webhook fields left on Org
+- Stop checking flow_server_enabled and remove support for editing it
+
+## v4.26.1
+
+- Remove no longer used check_campaigns_task
+
+## v4.26.0
+
+- Remove handling of incoming messages, channel events and campaigns, all of which is now handled by mailroom
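+
+Background for the "Fix (old) mac excel dates in imports" entry in v5.0.4 above: workbooks saved in Excel's 1904 date system (the old Mac default) count days from 1904-01-01 instead of 1899-12-30, so the same serial number means a different date. A generic conversion sketch, not the project's importer:
+
+```python
+import datetime
+
+def excel_serial_to_datetime(serial: float, date_1904: bool) -> datetime.datetime:
+    """Converts an Excel serial day count honouring the workbook's date mode."""
+    epoch = datetime.datetime(1904, 1, 1) if date_1904 else datetime.datetime(1899, 12, 30)
+    return epoch + datetime.timedelta(days=serial)
+
+print(excel_serial_to_datetime(43466, date_1904=False))  # 2019-01-01 00:00:00
+print(excel_serial_to_datetime(43466, date_1904=True))   # 2023-01-02 00:00:00
+```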
+
+## v4.25.0
+
+- Add sentry error to handle_event_task as it shouldn't be handling anything
+- Remove processing of timeouts which is now handled by mailroom
+- Start broadcast mailroom tasks with HIGH_PRIORITY
+- Fix EX settings page load
+- Migration to convert any remaining orgs to use mailroom
+- Fix broken links to webhook docs
+- Simplify WebHookEvent model
+
+## v4.23.3
+
+- Send broadcasts through mailroom
+- Add org name in the email subject for exports
+- Add org name in export filename
+
+## v4.24.0
+
+- Add org name in the export email subject and filename
+- Update flow editor to 1.5.9
+- Remove functionality for handling legacy surveyor submissions
+
+## v4.23.1
+
+- Make exported fields match goflow representation and add .as_export_ref() to exportable classes
+- Update to latest floweditor v1.5.5
+- Persist group and field definitions in exports
+- Add support for SignalWire (https://signalwire.com) for SMS and IVR
+
+## v4.23.0
+
+- Save channel and message label dependencies on flows
+
+## v4.22.63
+
+- Update to latest floweditor v1.5.5
+- Allow switching between editors
+- Update Django to version 2.1.9
+
+## v4.22.62
+
+- add US/ timezones for clicksend as well
+
+## v4.22.61
+
+- add clicksend channel type
+
+## v4.22.60
+
+- Update flow editor to 1.5.4
+- Allow imports and exports of v13 flows
+
+## v4.22.55
+
+- Enable export of new flows
+- Update Nexmo supported countries list
+
+## v4.22.54
+
+- rename migration, better printing
+
+## v4.22.53
+
+- add migration to repopulate metadata for all flows
+
+## v4.22.52
+
+- Expose result specs in flow metadata on flows API endpoint
+- Use Temba JSON adapter when reading JSON data from DB
+- Don't update TwiML channel when claiming it
+- Use most recent topup for credit transfers between orgs
+
+## v4.22.51
+
+- Update to flow-editor 1.5.3
+
+## v4.22.50
+
+- Update to floweditor v1.5.2
+
+## v4.22.49
+
+- Only do mailroom validation on new flows
+
+## v4.22.48
+
+- Fix 11.12 migration and importing flows when flow contains a reference to a channel in a different org
+- Make WhatsApp endpoint configurable, either FB or self-hosted
+
+## v4.22.47
+
+- tweak to WA language mapping
+
+## v4.22.46
+
+- add hormuud channel type
+- newest editor
+- update invitation secret when user is re-invited
+
+## v4.22.45
+
+- Tweak compress for vendor
+
+## v4.22.44
+
+- Update to flow editor 1.4.18
+- Add mailroom endpoints for functions, tweak styles for selection
+- Honor is_active when creating contact fields
+- Cache busting for flow editor
+
+## v4.22.43
+
+- Update flow editor to 1.4.17
+- Warn users when starting a flow when they have a WhatsApp channel that they should use templates
+
+## v4.22.42
+
+- add page to view synched WhatsApp templates for a channel
+
+## v4.22.41
+
+- Update flow editor to 1.4.16
+- View absolute attachments in old editor
+
+## v4.22.40
+
+- Update editor to 1.4.14
+
+## v4.22.39
+
+- latest editor
+
+## v4.22.38
+
+- update defs with db values both when writing and reading
+- remove clearing of external ids for messages
+
+## v4.22.37
+
+- Update to flow-editor 1.4.12
+- Remove footer gap on new editor
+
+## v4.22.36
+
+- allow Alpha users to build flows in new editor
+- don't use RuleSets in figuring results, exports, categories
+
+## v4.22.28
+
+- Adjust `!=` search operator to include unset data
+- Remove broadcast recipients table
+- IMPORTANT \* You must make sure that all purged broadcasts have been archived using
+  rp-archiver v1.0.2 before deploying this version of RapidPro
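+
+To make the v4.22.28 `!=` change above concrete: for `field != "blue"` to also match contacts where the field is unset, the generated SQL must treat NULL as "not equal" instead of letting it drop the row. A hedged sketch of that translation (illustrative, not the project's query compiler):
+
+```python
+def not_equal_condition(column: str) -> str:
+    """SQL condition for != that also matches unset (NULL) values."""
+    # PostgreSQL: NULL IS DISTINCT FROM 'blue' evaluates to true, unlike NULL != 'blue'
+    return f"({column} IS DISTINCT FROM %s)"
+
+# equivalent long form: (column != %s OR column IS NULL)
+print(not_equal_condition("favorite_color"))
+```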
+
+## v4.22.27
+
+- styling tweaks to contacts page
+
+## v4.22.26
+
+- Always show featured ContactFields on Contact.read page
+- Do not migrate ruleset with label null and action msg text null
+
+## v4.22.25
+
+- only show pagination warning when we have more than 10k results
+
+## v4.22.24
+
+- support != search operator
+
+## v4.22.23
+
+- simplify squashing of squashable models
+- show a notification when users open the last page of the search
+- update `modified_on` once msgs export is finished
+
+## v4.22.22
+
+- Fix issue with pagination when editing custom fields
+
+## v4.22.21
+
+- Add new page for contact field management
+
+## v4.22.20
+
+- add management command to reactivate fb channels
+
+## v4.22.19
+
+- api for templates, add access token and fb user id to claim, sync with facebook endpoint
+
+## v4.22.18
+
+- fix recalculating event fires for fields when that field is created_on
+
+## v4.22.17
+
+- Don't overwrite show_in_table flag on contact import
+- Prevent updates of contact field labels when adding a field to a flow
+- Add migration to populate results and waiting_exit_uuids in Flow.metadata
+
+## v4.22.15
+
+- Do not immediately expire flow when updating expirations (leave that to mailroom)
+- Fix boundary aliases duplicates creation
+- Add org lock for users to deal with simultaneous updates of org users
+- Add results and waiting_exit_uuids to flow metadata and start populating on Flow.update
+
+## v4.22.14
+
+- CreateSubOrg needs to be non-atomic as well as it creates flows which need to be validated
+- Remove unused download view
+
+## v4.22.13
+
+- allow blank pack, update permissions
+
+## v4.22.12
+
+- remove APK read view, only have update
+- allow setting pack number
+
+## v4.22.11
+
+- Add APK app and new Android claiming pipeline for Android Relayer
+
+## v4.22.10
+
+- Use output of flow validation in mailroom to set flow dependencies
+- Make message_actions.json API endpoint support partial updates
+- Log to librato only pending messages older than a minute
+
+## v4.22.6
+
+- Add Viber Welcome Message event type and config
+- More customer support service buttons
+
+## v4.22.5
+
+- queue incoming messages and incoming calls from relayer to mailroom
+
+## v4.22.4
+
+- Temporarily disable flow validation until we can fix it for new orgs
+
+## v4.22.3
+
+- Lazily create any dependent objects when we save
+- MAILROOM_URL in settings.py.dev should default to http://localhost:8090
+- Call to mailroom to validate a flow before saving a new definition (and fix invalid flows in our tests) — see the sketch below
+
+## v4.22.2
+
+- Fix schedule next fire calculation bug when schedule is greater than number of days
+- Fix to allow archiving flow for removed (inactive) campaign events
+- Strip resthook slug during creation
+- Ignore request from old android clients using GCM
+
+## v4.22.1
+
+- Increase the schedule broadcast text max length to be consistent on the form
+
+## v4.22.0
+
+- Fix case of single node flow with invalid channel reference
+- Remove ChannelConnection.created_by and ChannelConnection.is_active
+- Fix flow export results to include results from replaced rulesets
+
+## v4.21.15
+
+- correct exclusion
+
+## v4.21.14
+
+- Don't requeue flow server enabled msgs
+- Exit sessions in bulk exit, ignore mailroom flow starts
+
+## v4.21.13
+
+- Fix import with invalid channel reference
+- Add flow migration to remove actions with invalid channel reference
+
+## v4.21.12
+
+- improve simulator for goflow simulation
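+
+A hedged sketch of the mailroom round-trip described in v4.22.3 above — `MAILROOM_URL` and its dev default come from that entry, while the exact endpoint path and payload shape are assumptions:
+
+```python
+import requests
+from django.conf import settings
+
+def validate_flow(definition: dict, org_id: int) -> dict:
+    """Asks mailroom to inspect/validate a flow definition before saving a new revision."""
+    base = getattr(settings, "MAILROOM_URL", "http://localhost:8090")  # dev default per v4.22.3
+    resp = requests.post(f"{base}/mr/flow/inspect", json={"flow": definition, "org_id": org_id})
+    resp.raise_for_status()  # surfaces engine validation errors to the caller
+    return resp.json()
+```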
+
+## v4.21.11
+
+- work around JS split to show simulator images
+
+## v4.21.10
+
+- display attachments that are just 'image:'
+
+## v4.21.9
+
+- simulator tweaks
+- show Django warning if mailroom URL not configured
+
+## v4.21.8
+
+- make sure we save flow_server_enabled in initialize
+
+## v4.21.7
+
+- Update status demo view to match the current webhook posted data
+- Remove all remaining reads of contact.is_test
+
+## v4.21.6
+
+- Use pretty datetime on contact page for upcoming events
+
+## v4.21.5
+
+- Replace final index which references contact.is_test
+- Fix labels remap on flow import
+
+## v4.21.4
+
+- All new orgs flow server enabled
+- Fallback to org domain when no channel domain set
+
+## v4.21.3
+
+- Remove all remaining checks of is_test, except where used in queries
+- Update contact indexes to not include is_test
+- Prevent users from updating dynamic groups if query is invalid
+- Update Python module dependencies
+
+## v4.21.2
+
+- set country code on test channel
+
+## v4.21.1
+
+- do not log errors for more common exceptions
+
+## v4.21.0
+
+- Include fake channel asset when simulating
+- Add test for event retrying, fix out of date model
+- Stop checking contact.is_test in db triggers
+
+## v4.20.1
+
+- Remove unused fields on webhookevent
+- Default page title when contact has no name or URN (e.g. a surveyor contact)
+
+## v4.19.7
+
+- fix simulator to allow fields with empty value
+- remove remaining usages of test contacts for testing
+
+## v4.19.6
+
+- add incoming_extra flow to mailroom test
+- fix for test contact deletion migration
+
+## v4.19.5
+
+- pass extra to mailroom start task
+
+## v4.19.4
+
+- Support audio/mp4 as playable audio
+- Add migration to remove test contacts
+
+## v4.19.3
+
+- Ensure scheduled triggers start flows in mailroom if enabled
+
+## v4.19.2
+
+- remap incoming ivr endpoints for Twilio channels when enabling flow server
+- interrupt flow runs when enabling flow server
+- add enable_flow_server method to org, call in org update view
+
+## v4.19.1
+
+- Scope API throttling by org and user (see the sketch below)
+- Add export link on campaign read page
+- Fix SMTP server config to percentage encode slashes
+
+## v4.19.0
+
+- Add session_type field on FlowSession
+- Use provided flow definition when simulating if provided
+- Remove USSD app completely
+- Adjust broadcast status to API endpoint
+- Remove legacy (non-mailroom) simulation
+
+## v4.18.0
+
+- Make ChannelConnection.is_active nullable so it can be eventually removed
+- Replace traceback.print_exc() with logger.error
+- Make sure contact ids are iterable when starting a flow
+- Remove USSD proxy model
+
+## v4.17.0
+
+- Use URL kwargs for channel logs list to pass the channel uuid
+- Fix message campaign events on normal flows not being skipped
+- Default to month first date format for US timezones
+- Make Contact.created_by nullable
+- Fix to prevent campaign event from creating empty translations
+- Use new editor wrapper to embed instead of building
+- Remove USSD functionality from engine
+
+## v4.16.15
+
+- Fix Stripe integration
+
+## v4.16.14
+
+- fix webhook bodies to be json
+
+## v4.16.13
+
+- better request logging for webhook results
+
+## v4.16.12
+
+- further simplification of webhook result model, add new read and list pages
+
+## v4.16.11
+
+- add org field to webhook results
+
+## v4.16.10
+
+- Add surveyor content in mailroom_db command
+- Fix flows with missing flow_type
+- Update more Python dependencies
+- Prevent flows of one modality from starting subflows of a different modality
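+
+A minimal sketch of per-org-and-user throttling as described in v4.19.1 above, built on Django REST Framework's stock hooks; the cache-key scheme and rate are assumptions, not the project's class:
+
+```python
+from rest_framework.throttling import SimpleRateThrottle
+
+class OrgUserRateThrottle(SimpleRateThrottle):
+    """Rate-limits API requests per (org, user) pair rather than per user alone."""
+
+    scope = "v2"
+    rate = "2500/hour"  # example rate
+
+    def get_cache_key(self, request, view):
+        if not request.user or not request.user.is_authenticated:
+            return self.cache_format % {"scope": self.scope, "ident": self.get_ident(request)}
+        org = getattr(request, "org", None)  # assumes middleware attaches request.org
+        ident = f"{org.id if org else 0}-{request.user.pk}"
+        return self.cache_format % {"scope": self.scope, "ident": ident}
+```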
+
+## v4.16.8
+
+- Add support for Movile/Wavy channels
+- Switch to codecov for code coverage
+- Allow overriding brand domain via env
+- Add mailroom_db management command for mailroom tests
+- Start flow_server_enabled ivr flows in mailroom
+- Remove legacy channel sending code
+- Remove flow dependencies when deactivating USSD flows
+- Migrations to deactivate USSD content
+
+## v4.16.5
+
+- Fix quick replies in simulator
+
+## v4.16.4
+
+- More tweaks to Bongolive channel
+- Use mailroom simulation for IVR and Surveyor flows
+- Add a way to see all runs on flow results runs table
+
+## v4.16.3
+
+- Simplify generation of upload URLs with new STORAGE_URL setting
+
+## v4.16.2
+
+- Switch API used by BL channels
+- Fix rendering of attachments for mailroom simulation
+- Update black to version 18.9b0
+
+## v4.16.0
+
+- Fix flow_entered event name in simulator
+- Make created_by, modified_by on FlowStart nullable, add connections M2M on FlowStart
+- Rename ChannelSession to ChannelConnection
+
+## v4.15.2
+
+- Fix for flow dependency migration
+- Fix rendering of single digit hours in pretty_datetime tag
+- Use mailroom for flow migration instead of goflow
+- Add support for Bongo Live channel type
+
+## v4.15.1
+
+- Include default country in serialized environments used for simulation
+- Add short_datetime and pretty_datetime tags which format based on org settings
+- Prevent users from choosing flow they are editing in some cases
+
+## v4.15.0
+
+- Fix nexmo claim
+- Tweak 11.7 migration to not blow up if webhook action has empty URL
+- Bump module minor versions and remove unused modules
+- Remove ChannelSession.modified_by
+
+## v4.14.1
+
+- Make older flow migrations more fault tolerant
+- Tweaks to migrate_flows command to make error reporting more useful
+- Add flow migration to fix duplicate rule UUIDs
+- Update python-telegram-bot to 11.1.0
+- Update nexmo to 2.3.0
+
+## v4.14.0
+
+- Fix recent messages rollover with 0 messages
+- Use flowserver only for flow migration
+- Make created_by and modified_by optional on channel session
+
+## v4.13.2
+
+- create empty revisions for empty flows
+- properly handle empty errors on index page
+- fix error for policy read URL failing
+- add quick replies to mailroom simulator
+
+## v4.13.1
+
+- populate simulator environment for triggers and resumes
+- honour Flow.is_active on the Web view
+- fix android channel release to not throw if no FCM ID
+- add Play Mobile aggregator
+
+## v4.13.0
+
+- Add index for fast Android channel fetch by last seen
+- Remove gcm_id field
+- No messages sheet for flow results export on anon orgs
+- Add periodic task to sync channels we have not seen for a while
+- Add wait_started_on field to flow session
+
+## v4.12.6
+
+- Remove flow server trialling
+- Replace tab characters for GSM7 (see the sketch below)
+- Use mailroom on messaging flows for simulation
+- Raise ValidationError for ContactFields with null chars
+- upgrade to Django 2.1
+
+## v4.12.5
+
+- Make sure Flow.update clears prefetched nodes after potentially deleting them
+
+## v4.12.4
+
+- Fix Flow.update not deleting nodes properly when they change type
+
+## v4.12.3
+
+- Add try/except block on FCM sync
+- Issue #828, remove numbers replace
+
+## v4.12.2
+
+- Don't show queued scheduled broadcasts in outbox
+- Prevent deleting groups with active campaigns
+- Activate support for media attachment for Twitter channels
+- Remove ability to create webhook actions in editor
+- Add flow migration to replace webhook actions with rulesets
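+
+On "Replace tab characters for GSM7" in v4.12.6 above: the GSM 03.38 alphabet has no tab, so a single tab silently pushes an SMS into UCS-2 and cuts per-segment capacity from 160 to 70 characters. A generic substitution sketch (hypothetical helper, not the project's codec):
+
+```python
+GSM7_SUBSTITUTIONS = {"\t": " ", "\u00a0": " "}  # tab and non-breaking space are common offenders
+
+def make_gsm7_friendly(text: str) -> str:
+    """Replaces characters that would force UCS-2 encoding with GSM-safe equivalents."""
+    for bad, good in GSM7_SUBSTITUTIONS.items():
+        text = text.replace(bad, good)
+    return text
+
+assert make_gsm7_friendly("price:\t$10") == "price: $10"
+```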
+
+## v4.12.1
+
+- Fix importing campaign events based on created_on
+- Fix event fires creation for immutable fields
+- Remove WA status endpoint
+- Fix IVR runs expiration date initialization
+- Add UUID field to org
+
+## v4.11.7
+
+- Interrupt old IVR calls and related flow sessions
+- Move webhook docs button from the token view to the webhook view
+
+## v4.11.6
+
+- Faster squashing
+- Fix EX bulk sender form fields
+
+## v4.11.5
+
+- simulate flow_server_enabled flows in mailroom
+
+## v4.11.3
+
+- Add session log links to contact history for staff users
+- Hide old webhook config page if not yet set
+
+## v4.11.2
+
+- Fix passing false/true to archived param of flows API endpoint (see the sketch below)
+
+## v4.11.1
+
+- Turn on the attachment support for VP channels
+- Tweak 11.6 flow migration so that we remap groups, but never create them
+- Flows API endpoint should support filtering by archived and type
+- Log how many flow sessions are deleted and the time taken
+- Turn on the attachment support for WA channels
+- Adjust UI for adding quick replies and attachment in random order
+
+## v4.11.0
+
+- Add index for fetching waiting sessions by contact
+- Ensure test_db users have same username and email
+- Add index to FlowSession.ended_on
+- Make FlowSession.created_on non-null
+- Add warning class to skipped campaign event fires on contact history
+- Add fired_result field to campaign event fires
+
+## v4.10.9
+
+- Log and fail calls that cannot be started
+- Allow contact.created_on in flows, init new event
+
+## v4.10.8
+
+- Deactivate events when updating campaigns
+- Less aggressive event fire recreation
+- Use SMTP SERVER org config and migrate old config keys
+
+## v4.10.4
+
+- Retry failed IVR calls
+
+## v4.10.3
+
+- Show all split types on run results, use elastic for searching
+
+## v4.10.2
+
+- Flow migration for mismatched group uuids in existing flows
+- Remap group uuids on flow import
+- Migration to backfill FlowSession.created_on / ended_on
+
+## v4.10.1
+
+- Add config to specify content that should be present in the response of the request, if not mark the msg as failed
+- Allow campaign events to be skipped if contacts already active in flows
+
+## v4.10.0
+
+- Add FlowRun.parent_uuid
+- Add FlowSession.timeout_on
+- Create new flows with flow_server_enabled when org is enabled
+- Add flow-server-enabled to org, don't deal with flow server enabled timeouts or expirations on rapidpro
+
+## v4.9.2
+
+- Fix flowserver resume tests by including modified_on on runs sent to goflow
+
+## v4.9.1
+
+- Don't set preferred channels if they can't send or call
+- Don't assume events from goflow have step_uuid
+- Add indexes for flow node and category count squashing
+
+## v4.9.0
+
+- Delete event fires in bulk for inactive events
+- Fix using contact language for categories when it's not a valid org language
+- Fix translation of quick replies
+- Add FlowSession.current_flow and start populating
+- Refresh contacts list page after managing fields
+- Update to latest goflow (no more caller events, resumes, etc)
+- Fix flow results export to read old archive format
+- Batch event fires by event ID and not by flow ID
+- Make campaign events immutable
+
+## v4.8.1
+
+- Add novo channel
+
+## v4.8.0
+
+- Remove trialing of campaign events
+- Remove no longer used ruleset_analytis.haml
+- Expose @contact.created_on in expressions
+- Make Contact.modified_by nullable and stop writing to it
+- Optimize group releases
+- Add created_on/ended_on to FlowSession
+
+## v4.7.0
+
+- Bump Smartmin and Django versions
+- Expose @contact.created_on in expressions
+- Make Contact.modified_by nullable and stop writing to it
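+
+The v4.11.2 fix above ("Fix passing false/true to archived param") is the classic query-string boolean bug: `?archived=false` arrives as a non-empty string, which is truthy in Python. A generic sketch of the parsing it implies (hypothetical helper):
+
+```python
+def str_to_bool(value) -> bool:
+    """Parses query-param booleans so 'false'/'0'/'no' actually mean False."""
+    return value is not None and str(value).lower() in ("true", "t", "yes", "y", "1")
+
+assert bool("false") is True          # the bug
+assert str_to_bool("false") is False  # the fix
+assert str_to_bool("TRUE") is True
+```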
+
+## v4.6.0
+
+- Latest goflow
+
+## v4.5.2
+
+- Add config for deduping messages
+- Add created_on/ended_on to FlowSession
+- Update to latest goflow (event changes)
+- Do not delete campaign events, deactivate them
+- Do not delete runs when deleting a flow
+- Fix Campaigns events delete for system flow
+
+## v4.5.1
+
+- Use constants for queue names and switch single contact flow starts to use the handler queue
+- Raise ValidationError if flow.extra is not valid JSON
+- Defer group.release in a background task
+- Fix saving dynamic groups by reverting back to escapejs for contact group query on dialog
+
+## v4.5.0
+
+- Add Stopped event to message history and unknown/unsupported events
+- Switch result value to be status code from webhook rulesets, save body as @extra. and migrate result references to that
+
+## v4.4.20
+
+- Fix channel selection for sending to TEL_SCHEME
+- Add campaigns to all test orgs for make_db
+- Correctly embed JS in templates
+- Escape data before using `mark_safe`
+
+## v4.4.19
+
+- Fix validating URNField when input isn't a string
+
+## v4.4.18
+
+- Fix incorrect units in webhook_stats
+- Result input should always be a string
+
+## v4.4.17
+
+- Don't do duplicate message check for surveyor messages which are already SENT
+- Update to goflow 0.15.1
+- Update Location URLs to work with GADM IDs
+- Fix potential XSS issue: embed script only if `View.refresh` is set
+
+## v4.4.16
+
+- Fix IVR simulation
+
+## v4.4.15
+
+- Fix importing with Created On columns
+- Validate URNs during import
+- Classify flow server trials as simple if they don't have subflows etc
+- Use latest goflow for testing
+
+## v4.4.14
+
+- Enable import of GADM data using import_geojson
+
+## v4.4.13
+
+- Defer to mailroom for processing event fires for flows that are flowserver enabled
+- Tweaks to comparing events during flow server trials
+- Fix saved operand for group tests on anon orgs
+
+## v4.4.12
+
+- Add step URN editor completions
+- Add name to the channels shown on the flow editor
+- Don't zero pad anon ids in context
+- Update to latest expressions
+
+## v4.4.11
+
+- Ensure API v1 writes are atomic
+- JSONFields should use our JSON encoder
+- Use authenticated user for events on Org.signup
+- Trial shouldn't blow up if run has no events
+- Add urn to step/message context and make urn scheme accessible for anon org
+- Get rid of Flow.FLOW
+
+## v4.4.8
+
+- Don't trial flow starts from triggers
+- Fix messages from non-interactive subflows being added to their parent run
+- Setup user tracking before creating an Org
+- Migrate flows during flowserver trials with collapse_exits=false to keep paths exactly the same
+- Input for a webhook result test should be a single request
+- Migration to update F type flows to M
+
+## v4.4.7
+
+- Enforce validation on OrgSignup and OrgGrant forms
+- Cleanup encoding of datetimes in JSON
+- New flows should be created with type M and rename constants for clarity
+
+## v4.4.6
+
+- Fix updating dynamic groups on contact update from the UI
+- Make editor agnostic to F/M flow types
+
+## v4.4.5
+
+- Remove mage functionality
+- Fix Twilio number searching
+
+## v4.4.2
+
+- Use SystemContactFields for Dynamic Groups
+- Add our own json module for loads, dumps, always preserve decimals and ordering
+- Replace reads of Flow.flow_type=MESSAGE with Flow.is_system=True
+- Migration to populate Flow.is_system based on flow_type
+
+## v4.4.0
+
+- Fix intercom ResourceNotFound on Org.Signup
+- Remove follow triggers and channel events
+- Add Flow.is_system and start populating for new campaign event single message flows
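+
+A sketch of what "our own json module" in v4.4.2 above amounts to — parse floats as `Decimal` and keep key order in both directions; the goals are from the entry, the module shape is an assumption:
+
+```python
+import json
+from collections import OrderedDict
+from decimal import Decimal
+
+class DecimalEncoder(json.JSONEncoder):  # hypothetical name
+    """Serializes Decimals instead of raising TypeError."""
+    def default(self, o):
+        if isinstance(o, Decimal):
+            return float(o)  # or emit a raw number token to avoid float rounding
+        return super().default(o)
+
+def loads(s):
+    # parse_float keeps "1.10" as Decimal("1.10"); object_pairs_hook preserves key order
+    return json.loads(s, parse_float=Decimal, object_pairs_hook=OrderedDict)
+
+def dumps(obj):
+    return json.dumps(obj, cls=DecimalEncoder)
+
+assert loads('{"amount": 1.10}')["amount"] == Decimal("1.10")
+```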
+
+## v4.3.8
+
+- Data migration to deactivate all old style Twitter channels
+- Update Nexmo client
+
+## v4.3.4
+
+- Increase IVR logging verbosity
+- Trial all campaign message flows in flowserver
+- Tweak android recommendation
+
+## v4.3.3
+
+- Run Table should only exclude the referenced run, and include greater IDs
+- Raise validation error when trying to action inactive contacts over API
+- Remove uservoice as a dependency
+- Update versions of Celery, Postgis, Nexmo, Twilio
+- Fix Python 3.7 issues
+- Clear out archive org directory when full releasing orgs
+
+## v4.3.2
+
+- Update expressions library to get EPOCH() function
+
+## v4.3.1
+
+- Update to Django 2.0
+- Update postgres adapter to use psycopg2-binary
+
+## v4.3.0
+
+- Wrap asset responses in a results object
+- Use trigger type of campaign when starting campaign event flows in flowserver
+- Fix count for blocktrans to not use string from intcomma
+- Use audio/mp4 content type for m4a files
+
+## v4.2.4
+
+- Update to latest goflow and enable asset caching
+- Actually fix uploading mp4 files
+
+## v4.2.2
+
+- Show only user fields when updating field values for a contact
+- Fix MIME type for M4A files
+- Allow test_db command to work without having ES installed
+
+## v4.2.1
+
+- Ignore search exceptions in omnibox
+- Actually enable users to use system contact fields in campaign events
+
+## v4.2.0
+
+- Enable users to choose 'system fields' like created_on for campaign events
+
+## v4.1.0
+
+- Management command to recalculate node counts
+- Fix run path triggers when paths are trimmed
+- Allow file overwrite for public S3 uploads
+
+## v4.0.3
+
+- Handle cases when surveyor submits run with deleted action set
+- Document modified_on on our API endpoint
+- Use ElasticSearch for the omnibox widget
+
+## v4.0.2
+
+- fix count of suborgs after org deletion
+
+## v4.0.1
+
+- remove group settings call for WhatsApp which is no longer supported
+- easier way to service flows for CS reps
+
+## v4.0.0
+
+- Squash all migrations
+
+## v3.0.1000
+
+- fix display of archives formax on home page
+
+## v3.0.999
+
+- Fix chatbase font icon name
+- Add encoding config to EX channel type
+- Show archive link and information on org page
+
+## v3.0.449
+
+- Improve error message when saving surveyor run fails
+- Allow surveyor submissions to match rules on old revisions
+- Fix bug in msg export from archives
+
+## v3.0.448
+
+- Support audio attachments in all the audio formats that we can play
+- Add name and input to runs API v2 endpoint
+- Update InGroup test to match latest goflow
+- Expose resthooks over the assets endpoint and update logic to match new engine
+- Support messages export from archives
+
+## v3.0.447
+
+- Configure Celery to discover Wechat and Whatsapp tasks
+- Add Rwanda and Nigeria to AT claim form options
+- Extend timeout for archives links to 24h
+- Add created_on to the contact export
+
+## v3.0.446
+
+- Use constants for max contact fields and max group membership columns
+- Tweaks to twitter activity claiming that deals with webhooks already being claimed, shows errors etc
+- Rename form field to be consistent with the constants we use
+- Writes only now use XLSLite, more coverage
+- Limit number of groups for group memberships in results exports
+- Switch message export to use XLSLite
+- Fix default ACL value for S3 files
+- Add WeChat (for beta users)
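+
+Touching the S3 entries above ("Allow file overwrite for public S3 uploads" in v4.1.0 and "Fix default ACL value for S3 files" in v3.0.446), a generic boto3 sketch of an explicit-ACL upload; the bucket and key are made up:
+
+```python
+import boto3
+
+s3 = boto3.client("s3")
+
+# without an explicit ACL the object falls back to the bucket default, which may not be public
+with open("logo.png", "rb") as f:
+    s3.put_object(
+        Bucket="example-public-bucket",
+        Key="brands/rapidpro/logo.png",
+        Body=f,
+        ContentType="image/png",
+        ACL="public-read",
+    )
+# put_object overwrites an existing key unconditionally — that is the "allow overwrite"
+# behaviour; S3 has no create-only mode without extra request conditions
+```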
+
+## v3.0.445
+
+- fix dupe sends in broadcast action
+
+## v3.0.444
+
+- fix per credit calculation
+
+## v3.0.443
+
+- two decimals for per credit costs, remove trailing 0s
+
+## v3.0.442
+
+- Fix ContactField priority on filtered groups
+- Update Django to version 1.11.14
+- Reenable group broadcasts
+
+## v3.0.438
+
+- When comparing msg events in flowserver trials, make paths relative again
+- Change VariableContactAction to create contacts even without URNs
+- Fix import of ID columns from anon export
+- Don't fail twilio channel releases if auth key is no longer valid
+- Add UI messaging for archived data
+
+## v3.0.437
+
+- Fix import of header ID from anon export
+
+## v3.0.436
+
+- Fix supported scheme display lookup
+- Move action log delete to flow run release
+
+## v3.0.435
+
+- Fix group test operand when contact name is null
+- Mention all AfricasTalking countries on claim page
+- Warn user of columns to remove on import
+- Release events properly on campaign import
+- Add languages endpoint to asset server
+
+## v3.0.434
+
+- Add option for two day run expiration
+- Change group rulesets to use contact as operand same as new engine
+- Fix reconstructing sessions for runs being trialled in the flowserver so that we include all session runs
+
+## v3.0.433
+
+- Write boolean natively when exporting to xlsx
+- Improve reporting of flow server errors during trials
+- Clarify about contact import columns
+- Update flow result exports to match recent changes to contact exports
+
+## v3.0.432
+
+- Update modified_on on contacts that have their URN stolen
+- Full releasing of orgs and users
+
+## v3.0.431
+
+- Set exit_uuid at end of path when run completes
+- Make twitter activity API the default twitter channel type
+- Add Nigeria and Rwanda to AT supported countries
+- Don't exclude result input from flowserver trial result comparisons
+- Use operand rather than msg text for result input
+- Remove reporting to sentry when @flow.foo.text doesn't equal @step.text
+- Add flow migration to replace @flow.foo.text expressions on non-waiting rulesets
+
+## v3.0.430
+
+- Fix message flow updating
+
+## v3.0.429
+
+- Remove org.is_purgeable
+- Fix format of archived run json to match latest rp-archiver
+- Fix checking of result.text values in the context
+- Import/Export column headers with type prefixes
+- Add groups membership to contacts exports
+- Retry calls that are in IVRCall.RETRY_CALL
+- Retry IVR outgoing calls if contact did not answer
+
+## v3.0.428
+
+- Add FlowRun.modified_on to results exports
+- Change how we select archives for use in run exports to avoid race conditions
+- Report to sentry when @flow.foo.text doesn't match @step.text
+
+## v3.0.427
+
+- Release webhook events on run release
+- Fetch run results from archives when exporting results
+- Don't create action logs for non-test contacts
+
+## v3.0.426
+
+- Migrations for FK protects, including all SmartModels
+- Update to latest xlsxlite to fix exporting date fields
+- Remove merged runs sheet from results exports
+- Modified the key used in the transferto API call
+
+## v3.0.425
+
+- Enable burst sms type
+
+## v3.0.424
+
+- add burst sms channel type (Australia and New Zealand)
+
+## v3.0.423
+
+- trim event fires every 15 minutes
+
+## v3.0.422
+
+- Trim event fires older than a certain age
+- More consistent name of date field on archive model
+- Remove no longer needed functionality for runs that don't have child_context/parent_context set
+
+## v3.0.421
+
+- Degroup contacts on deactivate
+
+## v3.0.420
+
+- release sessions on reclaimed urns
+
+## v3.0.419
+
+- special case deleted scheme in urn parsing
+- release urn messages when releasing a contact
+- add delete reason to run
+
+## v3.0.418
+
+- Clear child run parent reference when releasing parent
+- Make sync events release their alerts
+- Release sessions, anonymize urns
+
+## v3.0.417
+
+- add protect to contacts and flows, you can fake the migrations in this release
+
+## v3.0.416
+
+- add deletion_date, use full path as link name
+- add unique constraint to disallow dupe archives
+
+## v3.0.415
+
+- add needs_deletion field, remove is_purged
+
+## v3.0.414
+
+- Set run.child_context when child has no waits
+- Use latest openpyxl and log the errors to sentry
+- Don't blow up if trialled run has no events
+- Allow editors to see archives / api
+- Migration to backfill run parent_context and child_context
+
+## v3.0.412
+
+- Fix archive filter test
+- Include id when serializing contacts for goflow
+
+## v3.0.411
+
+- Show when build failed because black was not executed
+- Fix calculation of low threshold for credits to consider only the top with unused credits
+- Allow flows with subflows to be trialled in the flowserver
+- Create webhook mocks for use in flowserver trials from webhook results
+- Enable Archive list API endpoint
+
+## v3.0.410
+
+- Remove purging, add release with delete_reason
+- Set parent_context in Flow.start and use it in FlowRun.build_expressions_context if available
+- Add is_archived counts for LabelCounts and SystemLabelCounts, update triggers
+
+## v3.0.409
+
+- Remove explicit use of uservoice
+- Use step_uuids for recent message calculation
+
+## v3.0.408
+
+- Format code with blackify
+- Add management commands to update consent status and org membership
+- Update to latest goflow to fix tests
+- Fix 'raise None' in migration and make flow server trial period be 15 seconds
+- Fix the campaign events fields to be datetime fields
+- Move flow server stuff from utils.goflow to flows.server
+- Add messangi channel type
+
+## v3.0.407
+
+- Reenable requiring policy consent
+- Allow msgs endpoint to return ALL messages for an org sorted by created_on
+- Return error message if non-existent asset requested from assets endpoint
+- If contact sends message whilst being started in a flow, don't blow up
+- Remove option to have a flow never expire, migrate current flows with never to 30 days instead
+- Request the user to fill the LINE channel ID and channel name on the claim form
+
+## v3.0.406
+
+- Fix logging events to intercom
+
+## v3.0.405
+
+- Migration to remove FlowStep
+
+## v3.0.404
+
+- remove old privacy page in favor of new policy app
+- use python3 `super` method
+- migration to backfill step UUIDs on recent runs
+
+## v3.0.403
+
+- tweaks to add_analytics users
+
+## v3.0.402
+
+- add native intercom support, add management command to update all users
+
+## v3.0.401
+
+- Fix quick replies in simulator
+- Lower the min length for Facebook page access token
+- Update Facebook claim to ask for Page ID and Page name from the user
+- Add new policies and consent app
+- Fix another migration that adds a field and writes to it in same transaction
+- Add step UUID fields to FlowPathRecentRun and update trigger on run paths to start populating them
+
+## v3.0.400
+
+- Don't create flow steps
+- Remove remaining usages of six
+
+## v3.0.399
+
+- Drop no longer used FlowRun.message_ids field
+- Don't allow nested flowserver trials
+- Fix migrations which can lead to locks because they add a field and populate it in same transaction
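+
+The migration-lock entries above (v3.0.401 "Fix another migration that adds a field and writes to it in same transaction", and the fix in v3.0.399) reflect a standard Django pattern: keep the `AddField` in its own schema-only migration, then backfill in a separate, non-atomic data migration so the long write doesn't run while the `ALTER TABLE` lock is held. A generic sketch of the second step, with placeholder app, model and field names:
+
+```python
+from django.db import migrations
+
+def backfill(apps, schema_editor):
+    FlowRun = apps.get_model("flows", "FlowRun")  # placeholder model/field names
+    while True:
+        ids = list(
+            FlowRun.objects.filter(delete_reason=None, is_active=False)
+            .values_list("id", flat=True)[:1000]
+        )
+        if not ids:
+            break
+        FlowRun.objects.filter(id__in=ids).update(delete_reason="A")
+
+class Migration(migrations.Migration):
+    atomic = False  # each batch commits on its own instead of one giant transaction
+    dependencies = [("flows", "0101_add_delete_reason")]  # the schema-only AddField migration
+    operations = [migrations.RunPython(backfill, migrations.RunPython.noop)]
+```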
+- Remove a lot of six stuff
+- Use bulk_create's returned msgs instead of forcing created_on to be same for batches of messages created by Broadcast.send
+- Use sent_on for incoming messages' real world time
+- Don't require steps for flow resumptions
+
+## v3.0.398
+
+- Add period, rollup fields to archive
+
+## v3.0.397
+
+- Stop writing .recipients when sending broadcasts as this is only needed for purged broadcasts
+- Rework run_audit command to check JSON fields and not worry about steps
+- Replace json_date_to_datetime with iso8601.parse_date (see the sketch below)
+- Stepless surveyor runs
+
+## v3.0.396
+
+- Use run path instead of steps to recalculate run expirations
+- Stop writing to FlowRun.message_ids
+
+## v3.0.395
+
+- Change FlowRun.get_last_msg to use message events instead of FlowRun.message_ids
+- Stop saving message associations with steps
+
+## v3.0.393
+
+- Drop values_value
+
+## v3.0.392
+
+- Remove broadcast purging
+
+## v3.0.391
+
+- remove reference to nyaruka for tracking users
+- fix test decoration to work when no flow server configured
+
+## v3.0.390
+
+- Disable webhook calls during flowserver trials
+- Use FlowRun.events for recent messages rollovers
+
+## v3.0.389
+
+- add archive model, migrations
+
+## v3.0.388
+
+- Make ContactField header clickable when sorting
+- Add first python2 incompatible code change
+- Add contact groups sheet on contact exports
+- Remove contact export as CSV
+- Update to latest goflow
+- Fix test_db contact fields serialization
+
+## v3.0.387
+
+- fix flowstarts migration
+
+## v3.0.386
+
+- update start contact migration to work with malformed extra
+
+## v3.0.384
+
+- fix not selecting contact id from ES in canary task
+
+## v3.0.383
+
+- add canary task for elasticsearch
+- record metrics about flowserver trial to librato
+- allow sorting of contact fields via dragging in manage dialog
+
+## v3.0.382
+
+- rename flow migration
+
+## v3.0.381
+
+- limit number of flows exited at once, order by expired_on to encourage index
+- remove python 2.7 build target in travis
+- start flow starts in the flows queue vs our global celery one
+- add flow start count model to track # of runs in a flow start
+- Always use channel.name for channel assets
+
+## v3.0.380
+
+- update to latest goflow to get location support
+- better output logs for goflow differences
+
+## v3.0.379
+
+- add v2 editor through /v2 command in simulator
+
+## v3.0.378
+
+- get all possible existing Twilio numbers on the Twilio account
+- reenable group sends \*
+- remove Value model usage, Contact.search
+
+## v3.0.377
+
+- do not allow dupe broadcasts to groups
+- Use ElasticSearch to export contacts and create dynamic groups
+- remove celery super auto scaler
+- update whatsapp activation by setting rate limits using new endpoints
+- fix incorrect keys for tokens and account sids for twiml apps
+- add ability to test flow results against goflow
+
+## v3.0.376
+
+- remove celery super auto scaler since we don't use it anywhere
+- update whatsapp activation by setting rate limits using new endpoints
+- fix incorrect keys for tokens and account sids for twiml apps
+- add admin command to help audit ES and DB discrepancies
+
+## v3.0.375
+
+- update whatsapp for new API
+- new index on contacts_contact.fields optimized for space
+
+## v3.0.374
+
+- allow reading, just not writing of sends with groups
+- remove old searching from contact views
+
+## v3.0.373
+
+- optimize group views
+- don't allow sends to groups to be imported or copied
+- remove normal junebug, keep only junebug ussd
+- fix isset/~isset, sort by 'modified_on_mu' in ES
+- use ES to search for contacts
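+
+On "Replace json_date_to_datetime with iso8601.parse_date" in v3.0.397 above — `iso8601` is a small PyPI library whose `parse_date` returns timezone-aware datetimes (defaulting to UTC when no offset is given):
+
+```python
+import iso8601
+
+dt = iso8601.parse_date("2018-07-02T10:30:00.123Z")
+print(dt.isoformat())  # 2018-07-02T10:30:00.123000+00:00
+print(iso8601.parse_date("2018-07-02T10:30:00+02:00").utcoffset())  # 2:00:00
+```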
+
+## v3.0.372
+
+- remap sms and status Twilio urls, log people still calling old ones
+- fix to display Export buttons on sent msgs folder and failed msgs folder
+- use message events in run.events for results exports instead of run.message_ids
+
+## v3.0.371
+
+- add twilio messaging handling back in
+
+## v3.0.370
+
+- remove logging of base handler being called
+
+## v3.0.369
+
+- rename contact field types of decimal to number
+- finalize contact imports so that updated contacts have modified_on outside transaction
+- try to fetch IVR recordings for up to a minute before giving up
+- remove handling and sending code for all channel types (except twitter and junebug)
+
+## v3.0.368
+
+- Fewer sentry errors from ES searching
+- Don't assume messages have a UUID in FlowRun.add_messages
+
+## v3.0.367
+
+- allow up to two minutes for elastic search lag
+
+## v3.0.366
+
+- fix empty queryset case for ES comparison
+
+## v3.0.365
+
+- chill the f out with sentry if the first contact in our queryset is less than 30 seconds old
+- fix duplicate messages when searching on msgs whose contacts have more than one urn
+
+## v3.0.364
+
+- fix environment variable for elastic search, catch all exceptions
+
+## v3.0.363
+
+- Add Elastic searching for contacts, for now only validating that results through ES are the same as through postgres searches
+
+## v3.0.361
+
+- Migrate Dart/Hub9 Contact urns and channels to support ext schemes
+
+## v3.0.360
+
+- Use more efficient queries for check channels task
+- Fix Location geojson import
+
+## v3.0.359
+
+- Add API endpoint to view failed messages
+
+## v3.0.358
+
+- Allow filtering by uuid on runs API endpoint, and include run uuid in webhooks
+- Fix blocktrans failing on label count
+
+## v3.0.357
+
+- Add linear backdown for our refresh rate on inbox pages
+
+## v3.0.356
+
+- Do not log MageHandler calls
+- Serialize contact field label as name instead
+
+## v3.0.355
+
+- Use force_text on uuids read from redis
+- Log errors for any channel handler methods
+
+## v3.0.354
+
+- Set placeholder msg.id = 0
+- Fix comparison when price is None
+
+## v3.0.353
+
+- Evaluate contact field with no value as False
+
+## v3.0.352
+
+- Update to Facebook graph api v2.12
+
+## v3.0.351
+
+- Support plain ISO dates (not just datetimes)
+
+## v3.0.350
+
+- Swallow exceptions encountered when parsing, don't add to group
+- Set placeholder msg.id = 0
+
+## v3.0.349
+
+- Deal with null state values in contact search evaluation
+
+## v3.0.348
+
+- Fix off by one error in calculating best channel based on prefixes
+- Reevaluate dynamic groups using local contact fields instead of SQL
+
+## v3.0.347
+
+- Add modified_on index for elasticsearch
+
+## v3.0.346
+
+- Don't start archived flows
+- Don't show stale dates on campaign events
+- Allow brands to configure flow types
+- Remove group search from send to others action
+- Fixes for test contact activity
+
+## v3.0.345
+
+- Migration to backfill run.events and add step uuids to run.path
+- Do the right thing when we are presented with NaN decimals
+
+## v3.0.344
+
+- Use real JSONField for FlowRun.events
+- Add FlowRun.events and start populating with msg events for new runs
+- Serialize Contact.fields in test_db
+- Update to latest goflow release
+
+## v3.0.342
+
+- Fix for decimal values in JSON fields attribute
+- Fix for not being able to change contact field types if campaign event is inactive
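+
+On "Do the right thing when we are presented with NaN decimals" in v3.0.345 above: Python's `Decimal` admits NaN, which breaks equality checks and has no JSON representation, so values need a guard like this generic sketch:
+
+```python
+from decimal import Decimal
+
+def clean_decimal(value: Decimal):
+    """Returns None for NaN/Infinity, which can't be serialized to JSON."""
+    if value.is_nan() or value.is_infinite():
+        return None
+    return value
+
+assert clean_decimal(Decimal("1.5")) == Decimal("1.5")
+assert clean_decimal(Decimal("NaN")) is None  # NaN != NaN, so comparisons also misbehave
+```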
+
+## v3.0.341
+
+- Add if not exists to index creation for fields
+- Last of Py3 compatibility changes
+
+## v3.0.340
+
+- Use fields JSON field on Contact instead of Value table for all reading.
+- Force campaign events to be based off of DateTime fields
+- Migration to change all contact fields used in campaign events to DateTime
+- Migration to add GIN index on Contact.fields
+
+## v3.0.339
+
+- Remove leading and trailing spaces on location string before boundaries path query
+- Require use of update_fields with Contact.save()
+- Event time of contact_changed is when contact was modified
+- Use latest goflow release
+- Make special channel accessible during simulator use
+
+## v3.0.338
+
+- Always serialize contact field datetime values in the org timezone
+- Add migration for population of the contact field json
+
+## v3.0.336
+
+- Update middlewares to Django defaults for security
+- Add JSON fields to Contact, set in set_field
+- backfill any null location paths, make not null, update import to set path, set other levels on fields when setting location
+
+## v3.0.335
+
+- Allow groups when scheduling flows or triggers
+- Fix configuration page URLs and use courier URLs
+- Replace contact.channel in goflow serialization with a channel query param in each contact URN
+- Serialize contact.group_uuids as groups with name and UUID
+
+## v3.0.334
+
+- Add response to external ID to courier serialized msg if we have response to
+- More Py3 migration work
+- Remove broadcasting to groups from Send Message dialog
+
+## v3.0.332
+
+- Do not delete RuleSets, only disconnect them from flows
+
+## v3.0.331
+
+- Fix scoping for sim show/hide
+
+## v3.0.330
-v8.3.27 (2023-09-25)
--------------------------
- * Tweak mailroom_db to create an FBA channel instead of a TWT channel
- * Remove ticketers as a feature and the views for connecting external ticketers
- * Re-add optin as distinct message type
- * Add undocumented API endpoint for opt-ins
+- Allow toggling of new engine on demand with /v2 command in simulator
-v8.3.26 (2023-09-22)
--------------------------
- * Bump cryptography from 41.0.3 to 41.0.4
- * Add optin field to Broadcast
+## v3.0.329
-v8.3.25 (2023-09-21)
--------------------------
- * Fix trigger ordering
+- Fix negative cache ttl for topups
-v8.3.24 (2023-09-21)
--------------------------
- * Add opt-in and opt-out trigger types (staff only for now)
- * Group keyword triggers and catch all triggers under a Messages folder
- * Move broadcasts and scheduled to their own pages
+## v3.0.328
-v8.3.23 (2023-09-21)
--------------------------
- * Replace Msg.type=optin with optin reference on msg
- * Group trigger types into folders
- * Make sure staff can update the log policy on all channel types
+- Remove Vumi Type
+- Remove custom autoscaler for Celery
+- Implement Plivo without Plivo library
-v8.3.22 (2023-09-19)
--------------------------
- * Make ticketers API endpoint unpublicized
- * Add 'Send Now' to broadcast creation
+## v3.0.325
-v8.3.21 (2023-09-18)
--------------------------
- * Add basic OptIn model
- * Use env variable for dev mode host
+- Build dynamic groups in background thread
+- Dynamic Channel changes, use uuids in URLs, allow custom views
+- Allow WhatsApp channels to refresh contacts manually
+- Allow brands to specify includes for the document head
+- Fix external claim page, rename auth_urn for courier
+- Change VB channel type to be a dynamic channel
+- Remove unused templates
-v8.3.20 (2023-09-12)
--------------------------
- * Update editor for localized attachment fix
+## v3.0.324
-v8.3.19 (2023-09-12)
--------------------------
- * Add new data migration to fix IVR call counts
- * Drop Channel.parent, ContactURN.auth and Org.input_cleaners
- * Remove support for delegate channels
+- Add ability to run select flows against a flowserver instance
-v8.3.18 (2023-09-07)
--------------------------
- * Add data migration to populate ContactURN.auth_tokens
+## v3.0.323
-v8.3.17 (2023-09-06)
--------------------------
- * Add ContactURN.auth_tokens to replace .auth
+- Move JioChat access creation to channel task
+- Use 'list()' on python3 dict iterators
+- Use analytics-python===1.2.9, python3 compatible
+- Fix using PlayAction in simulator and add tests
+- Fix HasEmailTest to strip surrounding punctuation
+- ContainsPhraseTest shouldn't blow up if test string is empty
+- Use 'six' library for urlparse, urlencode
-v8.3.16 (2023-09-06)
--------------------------
- * Tweak documentation for flow_starts endpoint
- * Allow agents to update ticket topics
+## v3.0.322
-v8.3.15 (2023-09-06)
--------------------------
- * Add hover-darker button option
- * Update icons
+- Unfreeze phonenumbers library so we always use latest
+- Remove old Viber VI channel type
+- Add config template for LN channel type
+- Move configuration blurbs to channel types
+- Move to use new custom model JSONAsTextField where appropriate (see the sketch below)
-v8.3.14 (2023-08-31)
--------------------------
- * Limit loading to the recent 100 sessions
- * Disallow GET request for media upload view
+## v3.0.321
-v8.3.13 (2023-08-28)
--------------------------
- * Tweaks to the channel config blurbs for consistency
- * Fetching messages by label should include archived messages
- * Use secrets module instead of random for random_string
- * Little bit of cleanup in channel types like removing unused fields
+- Fix quick-reply button in flow editor
-v8.3.12 (2023-08-23)
--------------------------
- * Add ChannelType.config_ui to replace configuration_urls, configuration_blurb etc
- * Show Somleng config URLs based on channel role
- * Add Org.input_collation
- * Remove Blackmyna, Chikka, Junebug, Twitter legacy, old Zenvia channel types
+## v3.0.320
-v8.3.11 (2023-08-17)
--------------------------
- * Convert final haml templates in root directory
+- Fix webhook rule as first step in run interpreting msg wrong
+- Change mailto URN importing to use header 'mailto' and make 'email' always a field. Rename 'mailto' fields to 'email'.
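+
+A minimal sketch of what a field like `JSONAsTextField` (named in the v3.0.321 entry above) typically looks like in Django — JSON (de)serialized at the field boundary but stored in a plain TEXT column; a generic reconstruction, not the project's implementation:
+
+```python
+import json
+
+from django.db import models
+
+class JSONAsTextField(models.TextField):
+    """Stores a Python dict/list as JSON text in a TEXT column."""
+
+    def from_db_value(self, value, expression, connection):
+        return json.loads(value) if value is not None else None
+
+    def to_python(self, value):
+        if value is None or isinstance(value, (dict, list)):
+            return value
+        return json.loads(value)
+
+    def get_prep_value(self, value):
+        return None if value is None else json.dumps(value)
+```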
-v8.3.10 (2023-08-17)
--------------------------
- * Add Org.input_cleaners
- * Always show name / anon id for anon orgs in contact lists
- * Don't let mailroom handle tasks during tests
- * Fix title on welcome page
+## v3.0.319
-v8.3.9 (2023-08-16)
--------------------------
- * Fix onSpload fire when initial page doesn't call it
+- Add ArabiaCell channel type
+- Tweaks to Mtarget channel type
+- Pathfix for highcharts
-v8.3.8 (2023-08-16)
--------------------------
- * Use $ instead of onSpload
+## v3.0.318
-v8.3.7 (2023-08-16)
--------------------------
- * Fix Javascript on claim number view
- * Switch test_db to assume a docker container
+- Add input to webhook payload
-v8.3.6 (2023-08-15)
--------------------------
- * Convert haml templates in includes folder and utils app
- * Cleanup page titles in settings section
+## v3.0.317
-v8.3.5 (2023-08-14)
--------------------------
- * Convert haml templates in public and orgs apps
+- Remove support for legacy webhook payload format
+- Fix org-choose redirects for brands
-v8.3.4 (2023-08-14)
--------------------------
- * Convert templates in assets, channels, msgs, request_logs and schedules apps as well as overridden smartmin templates
+## v3.0.316
-v8.3.3 (2023-08-10)
--------------------------
- * Simplify message indexes and system label queries
+- Remove stop endpoint for MT
-v8.3.2 (2023-08-10)
--------------------------
- * Add data migration to convert old I/F msg types
+## v3.0.315
-v8.3.1 (2023-08-09)
--------------------------
- * Merge pull request #4779 from nyaruka/less_haml
- * Some tweaks to templates based on linter
- * Convert all haml templates in channel types
+- Inactive flows should not be listed on the API endpoint
+- Add Mtarget channel type
-v8.3.0 (2023-08-09)
--------------------------
- * Drop no longer used Org.brand field
- * Add messagebird channel type
+## v3.0.314
-v8.2.0 (2023-08-07)
--------------------------
- * Update stable versions
+- Add run dict to default webhook payload
-v8.1.245 (2023-08-05)
--------------------------
- * Truncate query labels on flow start
- * Fix line length formatting
- * Fixes for login and API titles
+## v3.0.313
-v8.1.244 (2023-08-04)
--------------------------
- * Fix error handling for temba-contact-search
+- have URNs resolve to dicts instead of just the display
+- order transfer credit options by name
+- show dashboard link even if org is chosen
-v8.1.243 (2023-08-03)
--------------------------
- * Fix DELETE endpoints in API explorer
- * Bump cryptography from 41.0.2 to 41.0.3
+## v3.0.312
-v8.1.242 (2023-08-02)
--------------------------
- * Update to components with modax serialize fix
+- include contact URN in webhook payload
-v8.1.241 (2023-08-02)
--------------------------
- * Fix two factor disable and initial QR code rendering
+## v3.0.311
-v8.1.240 (2023-08-01)
--------------------------
- * Update components with checkbox value update
- * Stop writing no longer used Org.brand
+- Allow exporting results of archived flows
+- Update Twitter Activity channels to work with latest beta changes
+- Increase maximum attachment URL length to 2048
+- Tweak contact searching so that set/not-set conditions check the type specific column
+- Migration to delete value decimal/datetime instances where string value is "None"
+- Don't normalize nulls in @extra as "None"
+- Clear timeouts for msgs which don't have credits assigned to them
+- Simpler contact get_or_create method to lookup a contact by urn and channel
+- Prevent updating name for existing contact when we receive a message
a message +- Remove fuzzy matching for ContainsTest -v8.1.239 (2023-08-01) -------------------------- - * Temp fix for org export page by replacing temba-checkbox with regular inputs - * Cleanup msg_console +## v3.0.310 -v8.1.238 (2023-07-28) -------------------------- - * Fix flow start log when starts don't have exclusions - * Remove unnecessary CSS class to hover +- Reimplement clickatell as a Courier only channel against new API -v8.1.237 (2023-07-28) -------------------------- - * Only consider the parsed query string in contact_search clean - * Add show CSS class to icon for contact list sorting +## v3.0.309 -v8.1.236 (2023-07-27) -------------------------- - * Rename flow_broadcast to flow_start - * Update editor to fix cases on result split - * Add new channel log types used by courier - * Update contact search widget for flow starts +- Use database trigger for inserting new recent run records +- Handle stop contact channel events +- Remove no longer used FlowPathRecentRun model -v8.1.235 (2023-07-26) -------------------------- - * Convert templates in dashboard, docs, globals, ivr, locations and notifications apps - * Use title-text for just overriding the text - * Restore missing msg box templates +## v3.0.308 -v8.1.234 (2023-07-25) -------------------------- - * Fix org export page - * Fix permissions for viewer for flow results +'# Enter any comments for inclusion in the CHANGELOG on this revision below, you can use markdown -v8.1.233 (2023-07-25) -------------------------- - * Simpliy convert_templates script - * Consistent title for initial page load - * Remove spa-title and spa-style - * Add archives to STORAGES +- Update date for webhook change on api docs +- Don't use flow steps for calculating test contact activity -v8.1.232 (2023-07-24) -------------------------- - * Do not set the max for y axis chart to allow that to be calculated - * Convert templates in the triggers app from haml +## v3.0.307 -v8.1.231 (2023-07-21) -------------------------- - * Simplify redis settings and organize settings better in sections +- Stop using FlowPathRecentMessage -v8.1.230 (2023-07-20) -------------------------- - * Tweak system check for storage settings to check different storages are configured - * Convert S3 log access to be via django storages - * Use pg_dump/restore from docker container in mailroom_db command so it's always correct version +## v3.0.306 -v8.1.229 (2023-07-19) -------------------------- - * Fix tickets list, to show compose properly on Firefox - * Add cpAddress parameter as optional for MTN channel type +- Migration to convert recent messages to recent runs -v8.1.228 (2023-07-18) -------------------------- - * Update Instagram docs broken link - * Allow initiating flow results download form the the flow labels filter view +## v3.0.305 -v8.1.227 (2023-07-17) -------------------------- - * Bump cryptography from 41.0.0 to 41.0.2 +- Add new model for tracking recent runs +- Add dynamic group optimization for new contacts -v8.1.226 (2023-07-13) -------------------------- - * Rework trimming cron tasks to use delete_in_batches - * Drop no longer used Binary Optional Data field +## v3.0.304 -v8.1.225 (2023-07-13) -------------------------- - * Fix icon for globals delete - * Migrate old Twilio channels using .bod to use .config instead - * Remove duplicate menu views in classifiers and channels apps +- Drop index on FlowStep.step_uuid as it's no longer needed -v8.1.224 (2023-07-12) -------------------------- - * Add log_policy to channel +## v3.0.303 + +- Still queue 
messages for sending when interrupted by a child + +## v3.0.302 + +- Use FlowRun.current_node_uuid for sending to contacts at a given flow node + +## v3.0.301 + +- Tweak process_message_task to not blow up if message doesn't exist +- Use FlowRun.message_ids for flow result exports + +## v3.0.300 + +- Use config secret instead of secret field on Channel +- Add tests for datetime contact API field update + +## v3.0.299 + +- Fix deleting resthooks +- Fix quick replies UI on Firefox + +## v3.0.298 + +- Process contact queue until there's a pending message or empty +- Make date parsing much stricter +- Migration to fix run results which were numeric but parsed as dates +- Use transaction when creating contact URN +- Add support for v2 webhooks + +## v3.0.294 + +- Fix run.path trigger to not blow up deleting old steps that don't have exit_uuids +- Define MACHINE_HOSTNAME for librato metrics + +## v3.0.293 + +- Fix handle_ruleset so we don't continue the run if a child has exited us +- Migration to backfill FlowRun.message_ids and .current_node_uuid (recommend faking and running manually) + +## v3.0.292 + +- Add support for 'direct' db connection +- Stop updating count and triggered on on triggers +- Add FlowRun.current_node_uuid and message_ids +- Catch IntegrityError and lookup again when creating contact URN +- Make sure we dont allow group chats in whatsapp + +## v3.0.291 + +- Ignore TMS callbacks + +## v3.0.289 + +- Stop writing values in flows to values_value + +## v3.0.287 + +- Performance improvements and simplications to flow result exports +- Add some extra options to webhook_stats +- Migration to convert old recent message records + +## v3.0.286 + +- Remove incomplete path counts + +## v3.0.285 + +- Migrate languages on campaign events +- Rework flow path count trigger to use exit_uuid and not record incomplete segments + +## v3.0.282 + +- Don't import contacts with unknown iso639-3 code +- Make angular bits less goofy for quick replies and webhooks +- Add is_active index on flowrun +- Don't disassociate channels from orgs when they're released +- Include language column in Contact export + +## v3.0.281 + +- Set tps for nexmo and whatsapp +- Dont overwrite name when receiving a message from a contact that already exists +- Flow start performance improvements + +## v3.0.280 + +- Parse ISO dates followed by a period +- Optimize batch flow starts + +## v3.0.279 + +- Update Nexmo channels to use new Courier URLs +- Store path on AdminBoundary for faster lookups +- Serialize metata for courier tasks (quick replies support) +- Add default manager to AdminBoundary which doesn't include geometry + +## v3.0.278 + +- Fixes to the ISO639-3 migration +- Add support for quick replies + +## v3.0.277 + +- Add flow migration for base_language in flow definitions + +## v3.0.276 + +- back down to generic override if not found with specific code +- Add esp-spa as exception + +## v3.0.275 + +- Fix language migrations + +## v3.0.274 + +- Fix serialization of 0 decimal values in API +- Add initial version of WhatsApp channel (simple messaging only) +- Migrate to iso639-3 language codes (from iso639-2) +- Remove indexes on Msg, FlowRun and FlowStep which we don't use +- Remove fields no longer used on org model + +## v3.0.273 + +- Don't blow up when a flow result doesn't have input + +## v3.0.272 + +- Fix parsing ISO dates with negative offsets + +## v3.0.271 + +- Serialize contact field values with org timezone + +## v3.0.270 + +- Load results and path from new JSON fields instead of step/value objects on 
API runs endpoint + +## v3.0.269 + +- Fix campaign export issue +- Disable legacy analytics page +- Change date constants and contact fields to use full/canonical format in expressions context + +## v3.0.265 + +- Fix not updating versions on import flows +- Require FlowRun saves to use update_fields +- Rework get_results to use FlowRun.results +- Don't allow users to save dynamic groups with 'id' or 'name' attributes +- Add flow version 11.0, create migration to update references to contact fields and flow fields + +## v3.0.264 + +- Show summary for non-waits on flow results +- Reduce number of queries during flow handling + +## v3.0.263 + +- Start campaigns in separate task +- Enable flow results graphs on flow result page +- Fix run table json parsing +- SuperAutoScaler! + +## v3.0.262 + +- Use string comparison to optimize temba_update_flowcategorycount +- Allow path counts to be read by node or exit +- SuperAutoscaler +- Fix inbox views so we don't look up channel logs for views that don't have them +- Add management command for analyzing webhook calls +- Change recent message fetching to work with either node UUID or exit UUID + +## v3.0.261 + +- Migrate revisions forward with rev version +- Limit scope of squashing so we can recover from giant unsquashed numbers + +## v3.0.260 + +- Make tests go through migration +- Set version number of system created flows +- Block saving old versions over new versions +- Perform apply_topups as a task, tweak org update form +- Updates to credit caches to consider expiration +- Tweak credit expiration email + +## v3.0.259 + +- Improve performance and restartability of run.path backfill migration +- Update to latest smartmin +- Use run.results for run results page + +## v3.0.258 + +- Set brand domain on channel creations, use for callbacks + +## v3.0.257 + +- Migration to populate run paths (timeconsuming, may want to fake aand run manually) +- Ensure actions have UUIDs in single message and join-group flows +- Flow migration command shouldn't blow up if a single flow fails + +## v3.0.255 + +- Fix Twilio to redirect to twilio claim page after connecting Twilio +- Add FlowRun.path and start populating it for new flow steps +- Removes no longer used Msg.has_template_error field + +## v3.0.254 + +- Use get_host() when calculating signature for voice callbacks + +## v3.0.253 + +- use get_host() when validating IVR requests + +## v3.0.252 + +- Better Twilio channel claiming + +## v3.0.250 + +- Tweaks to recommended channels display + +## v3.0.246 + +- Update smartmin to version 1.11.4 +- Dynamic channels: Chikka, Twilio, Twilio Messaging Service and TwiML Rest API + +## v3.0.245 + +- Tweaks to the great FlowRun results migration for better logging and for parallel migrations +- Fixes us showing inactive orgs in nav bar and choose page +- Ignore requests missing text for incoming message from Infobip + +## v3.0.244 + +- Add exit_uuid to all flow action_sets (needed for goflow migrations) + +## v3.0.243 + +- Add index to FlowPathRecentMessage +- Flows API endpoint should filter out campaign message flow type +- Add archived field to campaings API endpoint +- Fix to correctly substitute context brand variable in dynamic channel blurb + +## v3.0.242 + +- Data migration to populate results on FlowRun (timeconsuming, may want to fake and run manually) + +## v3.0.239 + +- Migration to increase size of category count + +## v3.0.238 + +- Increase character limits on category counts + +## v3.0.237 + +- Fix Nexmo channel link +- Add results field to FlowRun and 
start populating +- Add FlowCategoryCount model for aggregating flow results +- Remove duplicate USSD channels section + +## v3.0.234 + +- Remove single message flows when events are deleted + +## v3.0.233 + +- Remove field dependencies on flow release, cleanup migration +- Update to latest Django 1.11.6 + +## v3.0.232 + +- Mage handler shouldn't be accessible using example token in settings_common +- Make Msg.has_template_error nullable and stop using it + +## v3.0.231 + +- Add claim page for dmark for more prettiness +- Add management command to migrate flows forward +- Add flow migration for partially localized single message flows +- Recalculate topups more often +- Add dmark channel (only can send and receive through courier) +- Merge pull request #1522 from nyaruka/headers +- Replace TEMBA_HEADERS with http_headers() +- Improve mock server used by tests so it can mock specifc url with specific responses +- Add method to get active channels of a particular channel type category +- Replace remaining occurrences of assertEquals +- Fix the way to check USSD support +- Dynamic channels: Vumi and Vumi USSD + +## v3.0.230 + +- Deal with malformed group format as part of group updates +- Allow installs to configure how many fields they want to keep in @extra +- Fix Nexmo icon +- Add logs for incoming requests for InfoBip +- Do both Python 2 and 3 linting in a single build job + +## v3.0.229 + +- Do not set external ID for InfoBip we have send them our ID +- Fix channel address comparison to be insensitive to + +- Use status groupId to check from the InfoBip response to know if the request was erroneous + +## v3.0.228 + +- Add id to reserved field list + +## v3.0.227 + +- Update Infobip channel type to use the latest JSON API +- Migrate flows forward to have dependencies + +## v3.0.226 + +- Fix issue with dates in the contact field extractor +- Allow org admin to remove invites + +## v3.0.225 + +- Optimize how we check for unsent messages on channels +- Ensure all actions have a UUID in new flow spec version 10.1 +- Fixes viber URN validation: can be up to 24 chars +- Dynamic channels: Zenvia, YO +- Add support for minor flow migrations + +## v3.0.224 + +- Remove duplicate excellent includes (only keep compressed version) + +## v3.0.222 + +- Only show errors in UI when org level limits of groups etc are exceeded +- Improve error messages when org reaches limit of groups etc + +## v3.0.221 + +- Add indexes for retying webhook events + +## v3.0.220 + +- Remove no longer used Msg.priority (requires latest Mage) + +## v3.0.219 + +- Create channel event only for active channels +- Limit SMS Central channel type to the Kathmandu timezone +- Create fields from expressions on import +- Flow dependencies for fields, groups, and flows +- Dynamic channels: Start +- Dynamic channels: SMS Central + +## v3.0.218 + +- Delete simulation messages in batch of 25 to use the response_to index +- Fix Kannel channel type icon +- @step.contact and @contact should both be the run contact +- Migration to set value_type on all RuleSets + +## v3.0.217 + +- Add page titles for common pages +- New index for contact history +- Exit flows in batches so we dont have to grab all runs at once +- Check we can create a new groups before importing contact and show the error message to the user +- Fixes value type guessing on rulesets (we had zero typed as dates) +- Update po files +- Dynamic channels: Shaqodoon + +## v3.0.216 + +- Should filter user groups by org before limiting to 250 +- Fixes for slow contact history +- Allow 
updating existing fields via API without checking the count +- Update TWIML IVR protocol check +- Add update form fields in dynamic channel types +- Abstract out the channel update view form classes +- Add ivr_protocol field on channel type +- Mock constants to not create a lot of objects in test DB +- Limit the contact fields max per org to 200 to below the max form post fields allowed +- Limit number of contact groups creation on org to 250 +- Limit number of contact fields creation on org to 250 +- Dynamic channels: Red Rabbit, Plivo Nexmo + +## v3.0.212 + +- Make Msg.priority nullable so courier doesn't have to write to it +- Calculate TPS cost for messages and add them to courier queues +- Fix truncate cases in SQL triggers +- Fix migration to recreate trigger on msgs table +- Dynamic channels: Mblox + +## v3.0.211 + +- Properly create event fires for campaign events updated through api +- Strip matched string in not empty test +- Dynamic channels: Macrokiosk + +## v3.0.210 + +- Make message priority be based on responded state of flow runs +- Support templatized urls in media +- Add UI for URL Attachments +- Prevent creation of groups and labels at flow run time +- Dynamic channels: M3Tech, Kannel, Junebug and Junebug USSD + +## v3.0.209 + +- Add a way to specify the prefixes short codes should be matching +- Include both high_priority and priority in courier JSON +- Fix TwiML migration +- Fix JSON response when searching Plivo numbers + +## v3.0.208 + +- Msg.bulk_priority -> Msg.high_priority +- Change for currencies for numeric rule +- Dynamic channels for Jasmin, Infobip, and Hub9 + +## v3.0.207 + +- Fix Twiml config JSON keys +- Unarchiving a campaign should unarchive all its flows + +## v3.0.206 + +- Fix broken Twilio Messaging Service status callback URL +- Only update dynamic groups from set_field if value has changed +- Optimize how we lookup contacts for some API endpoints +- More dynamic channels + +## v3.0.205 + +- add way to show recommended channel on claim page for dynamic channels +- change Org.get_recommended_channel to return the channel type instead of a random string + +## v3.0.204 + +- separate create and drop index operations in migration + +## v3.0.203 + +- create new compound index on channel id and external id, remove old external id index +- consistent header for contact uuid in exports and imports +- unstop contacts in handle message for new messages +- populate @extra even on webhook failures +- fix flow simulator with chatbase connected +- use ContactQL for name of contact querying grammar +- dynamic channels: Clickatell +- fix contact searching where text includes + or / chars +- replace Ply with ANTLR for contact searching (WIP) + +## v3.0.201 + +- Make clean string method replace non characteres correctly + +## v3.0.200 + +- Support Telegram /start command to trigger new conversation trigger + +## v3.0.199 + +- Use correct Twilio callback URL, status is for voice, keep as handler + +## v3.0.198 + +- Add /c/kn/uuid-uuid-uuid/receive style endpoints for all channel types +- Delete webhook events in batches +- Dynamic channels: Blackmyna + +## v3.0.197 + +- update triggers so that updates in migration work + +## v3.0.196 + +- make sure new uuids are honored in in_group tests +- removes giant join through run/flow to figure out flow steps during export +- create contacts from start flow action with ambiguous country +- add tasks for handling of channel events, update handlers to use ChannelEvent.handle +- add org level dashboard for multi-org organizations 
+
+## v3.0.195
+
+- Tweaks to allow message handling straight from courier
+
+## v3.0.193
+
+- Add flow session model and start creating instances for IVR and USSD channel sessions
+
+## v3.0.192
+
+- Allow empty contact names for surveyor submissions but make them null
+- Honor admin org brand in get_user_orgs
+- Fix external channel bulk sender URL
+- Send broadcast in the same task as it is created in, and add JS utility method to format number
+- Try the variable as a contact uuid and use its contact when building recipients
+- Fix org lookup, use the same code path for sending a broadcast
+- Fix broadcast to flow node to consider all current contacts on the step
+
+## v3.0.191
+
+- Update test_db to generate deterministic UUIDs which are also valid UUID4
+
+## v3.0.190
+
+- Turn down default courier TPS to 10/s
+
+## v3.0.189
+
+- Make sure msg time never wraps in the inbox
+
+## v3.0.188
+
+- Use a real but mockable HTTP server to test flows that hit external URLs instead of mocking the requests
+- Add infobip as dynamic channel type and update it to use the latest Infobip API
+- Add support for Courier message sending
+
+## v3.0.183
+
+- Use twitter icon for twitter id urns
+
+## v3.0.182
+
+- Tweak test_start_flow_action to test parent run states only after child runs have completed
+- Stop contacts when they have only an invalid twitter screen name
+- Change to max USSD session length
+
+## v3.0.181
+
+- Ignore case when looking up twitter screen names
+
+## v3.0.180
+
+- Switch to using twitterid scheme for Twitter messages
+- Should be shipped before Mage v0.1.84
+
+## v3.0.179
+
+- Allow editing of start conversation triggers
+
+## v3.0.178
+
+- Remove urn field, urn compound index, remove last uses of urn field
+
+## v3.0.177
+
+- remove all uses of urn (except when writing)
+- create display index, backfill identity
+- Allow users to specify extra URNs columns to include on the flow results export
+
+## v3.0.176
+
+- Add display and identity fields to ContactURN
+- Add schemes field to allow channels to support more than one scheme
+
+## v3.0.175
+
+- Fix incorrect lambda use so message sending works
+
+## v3.0.174
+
+- Make ContactField.uuid unique and non-null
+
+## v3.0.173
+
+- Add migration to populate ContactField.uuid
+
+## v3.0.172

-v8.1.223 (2023-07-11)
-------------------------
- * More tweaks to org deletion
+- Only try to delete Twilio app when channel config contains 'application_sid'
+- Surveyor submissions should try rematching the rules if the same ruleset got updated by the user and old rules were removed
+- Add uuid field to ContactField
+- Convert more channel types to dynamic types

-v8.1.222 (2023-07-11)
-------------------------
- * Add delete_in_batches util function to improve org deletion
- * Actually fix deletion of campaign events during org deletion
+## v3.0.171

-v8.1.221 (2023-07-11)
-------------------------
- * Fix deleting of campaign events and add more logging to org deletion
+- Fixes for Twitter Activity channels
+- Add stop contact command to mage handler
+- Convert Firebase Cloud Messaging to a dynamic channel type
+- Convert Viber Public to a dynamic channel type
+- Change to the correct way for dynamic channel
+- Convert LINE to a dynamic channel type
+- Better message in SMS alert email

-v8.1.220 (2023-07-10)
-------------------------
- * Delete is only for deleting child workspaces
+## v3.0.170

-v8.1.219 (2023-07-10)
-------------------------
- * Fix problems with org deletion
+- Hide SMTP config password and do not change the set password if blank is submitted
+- Validate the length of message campaigns for better user feedback
+- Make FlowRun.uuid unique and non-null (advise faking this and building index concurrently)

-v8.1.218 (2023-07-07)
-------------------------
- * Update to flow editor with fix for ward cases
+## v3.0.169

-v8.1.217 (2023-07-06)
-------------------------
- * Convert haml files in contacts app
- * Bump django from 4.2.2 to 4.2.3
+- Migration to populate FlowRun.uuid. Advise faking this and running manually.
+- More channel logs for Jiochat channel interactions

-v8.1.216 (2023-07-05)
-------------------------
- * Add data migration to fix archived message counts for labels
- * Convert haml templates in campaigns and classifiers apps
+## v3.0.167

-v8.1.215 (2023-07-05)
-------------------------
- * Add missing migration that rebuilds constraint on contact URNs
- * Update channel log retention to 2 weeks
- * Disable old 360 Dialog channel type, and take the new integration out of beta
+- Fix inclusion of attachment urls in webhook payloads and add tests
+- Install lxml to improve performance of large Excel exports
+- Add proper deactivation of Telegram channels
+- Converted Facebook and Telegram to dynamic channel types
+- Add nullable uuid field to FlowRun
+- Make sure we consider all URN schemes we can send to when looking up if we have a send channel
+- Split Twitter and Twitter Beta into separate channel types
+- Remove support for old-style Twilio endpoints

-v8.1.214 (2023-07-03)
-------------------------
- * Update to psycopg3 non-binary
- * Reference templates as html
+## v3.0.166

-v8.1.213 (2023-07-03)
-------------------------
- * Convert flows app to be hamless
+- Release channels before Twilio/Nexmo configs are cleared
+- Expose flow start UUID on runs from the runs endpoint

-v8.1.212 (2023-07-03)
-------------------------
- * Sorted group list when editing contacts
- * Switch channel charts to load with json instead of embedded data
+## v3.0.165

-v8.1.211 (2023-06-28)
-------------------------
- * Fix Twilio channel update modal
+- Migration to populate FlowStart.uuid on existing objects (advise faking and run manually)

-v8.1.210 (2023-06-28)
-------------------------
- * Fix mangling of option attributes
- * Save channel logs with channels/ prefix
- * Add configurable agent access per contact field
+## v3.0.163

-v8.1.209 (2023-06-28)
-------------------------
- * Fix creating PublicFileStorage
+- Add uuid field to FlowStart
+- Migration to convert TwiML apps

-v8.1.208 (2023-06-28)
-------------------------
- * Fix S3 channel logs paths to not start with slash
- * Update to Django 4.2
+## v3.0.160

-v8.1.207 (2023-06-27)
-------------------------
- * Convert some haml templates to html
+- Add support for Twitter channels using new beta Activity API

-v8.1.206 (2023-06-27)
-------------------------
- * Drop duplicate index
- * Look for channel logs in S3 when not found in database
- * Move tracking label counts to statement level triggers
+## v3.0.159

-v8.1.205 (2023-06-27)
-------------------------
- * Replace index on channellog.channel
+- Clean incoming message text to remove invalid chars

-v8.1.204 (2023-06-26)
-------------------------
- * Fix inline group created and broadcast action
+## v3.0.158

-v8.1.203 (2023-06-26)
-------------------------
- * Update contact action fix
+- Add more exception currencies for pycountry
+- Support channel specific Twilio endpoints

-v8.1.202 (2023-06-26)
-------------------------
- * Rework settings for S3 buckets
+## v3.0.156

-v8.1.201 (2023-06-23)
-------------------------
- * Support runtime locales in components
+- Clean up pip-requires and reset pip-freeze

-v8.1.200 (2023-06-23)
-------------------------
- * Update for flow editor text inputs with null values
+## v3.0.155

-v8.1.199 (2023-06-22)
-------------------------
- * Updates for select widget to behave with more standard form controls
+- Reduce the rate limit for SMS Central to 1 request per second
+- Display Jiochat on channel claim page
+- Fix date pickers on modal forms
+- Update channels to generate messages with multiple attachments

-v8.1.198 (2023-06-22)
-------------------------
- * Rollback components
+## v3.0.154

-v8.1.197 (2023-06-22)
-------------------------
- * Override the correct alpha3 code for Oromifa
- * Update form components to use element internals
- * Rework loading of channel logs so it's easier to fetch from S3 too
+- Rate limit sending through SMS Central to 10 messages per second
+- Fix some more uses of Context objects no longer supported in django 1.11
+- Fix channel log list request time display
+- Add @step.text and @step.attachments to message context

-v8.1.196 (2023-06-21)
-------------------------
- * Improve ExternalURLField and don't assume http
- * Use org import task to import flows
+## v3.0.153

-v8.1.195 (2023-06-19)
-------------------------
- * Name override for oro language
- * Remove no longer used code relating to contact fields
+- Jiochat channels
+- Django 1.11

-v8.1.194 (2023-06-19)
-------------------------
- * Don't ignore user provided role for Somleng shortcodes
- * Fix flow export button height
- * Fix import translation to use new UI
- * Fix parent ID lookup in import geojson
- * Support Dialog360 Cloud API channels
+## v3.0.151

-v8.1.193 (2023-06-14)
-------------------------
- * Add surveyor icon
+- Convert all squashable and prunable models to use big primary keys

-v8.1.192 (2023-06-14)
-------------------------
- * Add icons for flows, fix issue with some spload fires
+## v3.0.150

-v8.1.191 (2023-06-13)
-------------------------
- * Broadcast update via wizard and updated list styling
+- Drop database-level length restrictions on msg and values
+- Add sender ID config for Macrokiosk channels
+- Expose org credit information on API org endpoint
+- Add contact_uuid parameter to update FCM user
+- Add configurable webhook header fields

-v8.1.190 (2023-06-12)
-------------------------
- * Add agent_access to API fields endpoint
- * Restrict agent users view of field values on API contacts endpoint
- * Remove use of django tags inside javascript
+## v3.0.148

-v8.1.189 (2023-06-12)
-------------------------
- * Fix broken list view template
- * Add djlint and latest django-hamlpy
+- Fix simulator with attachments
+- Switch to using new recent messages model

-v8.1.188 (2023-06-09)
-------------------------
- * Tweak contact field access backfill migration
+## v3.0.147

-v8.1.187 (2023-06-09)
-------------------------
- * Add ContactField.agent_access and backfill to view
- * Use statement level triggers for tracking current node counts
- * Remove old scheduled broadcast create view
+- Migration to populate FlowPathRecentMessage
+- Clip messages to 640 chars for recent messages table

-v8.1.186 (2023-06-08)
-------------------------
- * Format api_root.html and fix errors
- * Fix channel log pretty printing
+## v3.0.145

-v8.1.183 (2023-06-08)
-------------------------
- * Add djLint config
- * Add basic wizard support
+- Change Macrokiosk time format to not have space
+- Better error message for external channel handler for wrong time format
+- Add new model for tracking recent messages on flow path segments

-v8.1.182 (2023-06-08)
-------------------------
- * Support imports with Status column
- * Make viewer role users a feature that can be toggled
- * Allow exporting of blocked, stopped and archived contacts
+## v3.0.144

-v8.1.181 (2023-06-07)
-------------------------
- * Add redact_values for FBA and IG channel types
- * Remove unused code for legacy UI contact read and list pages
- * Rework channel log anonymization so even staff users have to explicitly break out of it
- * Rework channel log rendering to start from JSONified version
- * Fix adding queued broadcasts to Outbox view and counts
- * Cleanup db triggers for broadcasts

-v8.1.180 (2023-06-05)
-------------------------
- * Fix failed message resending and archived message deletion
+- Remove Msg.media field that was replaced by Msg.attachments
+- Change default IVR timeouts to 2m
+- Fix the content-type for Twilio call response

-v8.1.179 (2023-06-05)
-------------------------
- * Drop ChannelLog.msg and .call
+## v3.0.143

-v8.1.178 (2023-06-05)
-------------------------
- * Bump cryptography from 39.0.2 to 41.0.0
- * Stop reading from ChannelLog.msg and .call
- * Use per-statement db triggers for system label counts
+- Update contact read page and inbox views to show multiple message attachments
+- Fix use of videojs to provide consistent video playback across browsers
+- API should return error message if user provides something unparseable for a non-serializer param

-v8.1.177 (2023-06-02)
-------------------------
- * Remove dupe from changelog
+## v3.0.142

-v8.1.176 (2023-06-02)
-------------------------
- * Add some blocks on main templates
+- Fix handling of old msg structs with no attachments attribute
+- Tweak in create_outgoing to prevent possible NPEs in flow execution
+- Switch to using Msg.attachments instead of Msg.media
+- Replace index on Value.string_value with one that is limited to first 32 chars

-v8.1.175 (2023-06-02)
-------------------------
- * Add select all on list pages
+## v3.0.139

-v8.1.174 (2023-06-01)
-------------------------
- * Noop when releasing an already released org
- * Rework and simplify channel count db triggers
+- Fix Macrokiosk JSON responses

-v8.1.173 (2023-06-01)
-------------------------
- * Remove support for filtering channel logs by folder
+## v3.0.138

-v8.1.171 (2023-05-31)
-------------------------
- * Add index on channellog.uuid
- * Improve and expose the call list view
+- Migration to populate attachments field on old messages

-v8.1.170 (2023-05-31)
-------------------------
- * Remove rendering of contact history as template now that new UI only consumes it as JSON
- * Fix inbox msg type for Android channels
+## v3.0.137

-v8.1.169 (2023-05-30)
-------------------------
- * Allow call count backfill migration to be called offline
- * Fix ivr call trigger migration
- * Remove unused stuff from inbox views
+- Don't assume event fires still exist in process_fire_events
+- Add new Msg.attachments field to hold multiple attachments on an incoming message

-v8.1.168 (2023-05-30)
-------------------------
- * Add data migration to backfill ivr call counts
+## v3.0.136

-v8.1.167 (2023-05-29)
-------------------------
- * Add DB triggers to track counts of calls as a new system label
+- Fix scheduled broadcast text display

-v8.1.166 (2023-05-29)
-------------------------
- * Stop writing SystemLabelCount.is_archived so it can be dropped
+## v3.0.135

-v8.1.165 (2023-05-29)
-------------------------
- * Always write system label counts with is_archived=False and make field nullable
+- Make 'only' keyword triggers ignore punctuation
+- Make check_campaigns_task lock on the event fires that it will queue
+- Break up flow event fires into sub-batches of 500
+- Ignore and ack incoming messages from Android relayer that have no number

-v8.1.164 (2023-05-29)
-------------------------
- * Add data migration to delete old system label counts for is_archived=true because they're no longer updated
- * Fix getting FB business ID for WAC channels
+## v3.0.134

-v8.1.163 (2023-05-25)
-------------------------
- * Return empty sample/fields on preview_start endpoint until contactsearch component is updated
+- Add match_type option to triggers so users can create triggers which only match when message only contains keyword
+- Allow Africa's Talking to retry sending messages
+- Allow search on the triggers pages
+- Clear results for analytics when user removes a flow run

-v8.1.162 (2023-05-25)
-------------------------
- * Add BroadcastCRUDL.Preview
- * Fix broadcast send history template
+## v3.0.133

-v8.1.161 (2023-05-24)
-------------------------
- * User orgs based on request
- * Switch brand array to dict
- * Move plivo connect view to channel type
+- Make Msg.get_sync_commands more efficient
+- Fix open range airtime transfers
+- Fix multiple Android channels sync
+- Fix parsing of macrokiosk channel time format
+- Ensure that our select2 boxes show "Add new" option even if there is a partial match with an existing item
+- Switch to new translatable fields and remove old Broadcast fields
+- Add Firebase Cloud messaging support for Android channels

-v8.1.160 (2023-05-19)
-------------------------
- * Fix field update and deleting with same key
+## v3.0.132

-v8.1.159 (2023-05-19)
-------------------------
- * Don't allow horizontal scroll by default
+- Migration to populate new translatable fields on old broadcasts. This migration is slow on a large database so it's
+  recommended that large deployments fake it and run it manually.

-v8.1.158 (2023-05-19)
-------------------------
- * Fix scrolling for content pages without full height
- * Tweak how we run python scripts in CI
+## v3.0.128

-v8.1.157 (2023-05-18)
-------------------------
- * Add ticket editing
- * Remove old ticket assign view and support for notes with assignment
- * Add ticket topic menu and resizer
- * Move WAC connect view to the WhatsApp cloud channel type package
- * Remove accounts formax from workspace view as it isn't needed with new UI
+- Add new translatable fields to Broadcast and ensure they're populated for new stuff

-v8.1.156 (2023-05-17)
-------------------------
- * Update components for 302 fix
- * Make post_url work identically to posterize
+## v3.0.127

-v8.1.155 (2023-05-17)
-------------------------
- * Better handling of post_url for spa content menu
- * Really fix hiding surveyor form
+- Fix autocomplete for items containing digits or other items
+- Make autocomplete dropdown disappear when user clicks in input box
+- Replace usages of "SMS" with "message" in editor
+- Allow same subflow to be called without pause in between

-v8.1.154 (2023-05-17)
-------------------------
- * Hide the surveyor password input and not just the help text
- * Fix URLs in JS files
+## v3.0.126

-v8.1.153 (2023-05-17)
-------------------------
- * Move channel type constants to the channel type class
- * Don't show option to enter surveyor password if surveyor feature not enabled
- * Scoped javascript for flow broadcast modal
+- Fix exporting messages by a label folder
+- Improve performance of org export page for large orgs
+- Make it easier to enable/disable debug toolbar
+- Increase channel logging for requests and responses
+- Change contact api v1 to insert nonexistent fields
+- Graceful termination of USSD sessions

-v8.1.152 (2023-05-15)
-------------------------
- * Make js function name unique
- * Fix no_nav extra-script blocks
+## v3.0.125

-v8.1.151 (2023-05-15)
-------------------------
- * Fix the API explorer scripts and styles blocks
+- Don't show deleted flows on list page
+- Convert timestamps sent by MacroKiosk from local Kuala Lumpur time

-v8.1.150 (2023-05-15)
-------------------------
- * Cleanup broken or unused posterized links
- * Drop old flow start fields
+## v3.0.124

-v8.1.149 (2023-05-14)
-------------------------
- * Fix signups
+- Move initial IVR expiration check to status update on the call
+- Hide request time in channel log if unset
+- Check the existence of broadcast recipients before adding
+- Voice flows import should never allow expirations longer than 15 mins
+- Fix parse location to correctly use the tokenized text if the location was matched for the entire text
+- Use updates instead of full Channel saves() on relayer syncs, only update when there are changes

-v8.1.148 (2023-05-12)
-------------------------
- * Fix backwards compat for send message to somebody else
+## v3.0.123

-v8.1.147 (2023-05-12)
-------------------------
- * Fix flow refresh and global redirect hook
+- Use flow starts for triggers that operate on groups
+- Handle throttling errors from Nexmo when using API to add new numbers
+- Convert campaign event messages to HSTORE fields

-v8.1.146 (2023-05-12)
-------------------------
- * Add some null checks for frame selectors
+## v3.0.121

-v8.1.145 (2023-05-11)
-------------------------
- * Fix width for other views and posterize on choose
+- Add MACROKIOSK channel type
+- Show media for MMS in simulator

-v8.1.144 (2023-05-11)
-------------------------
- * Fix login width
- * Tweak Somleng claim blurb
+## v3.0.120

-v8.1.143 (2023-05-11)
-------------------------
- * Stop reading from old FlowStart fields
- * Merge and clean up main frame
- * Rename TwiML API channel to Somleng
+- Fix send all bug where we append list of messages to another list of messages
+- Flows endpoint should allow filtering by modified_on

-v8.1.142 (2023-05-11)
-------------------------
- * Add base mixin for channel type specific views that gives access to the type class
- * Update components and editor to support compose for somebody else
- * Move vonage connect view to the channel type
- * Allow deleting of archived triggers
+## v3.0.119

-v8.1.141 (2023-05-10)
-------------------------
- * Fix contacts title
- * Fix vanilla landing
- * Remove lessblock and replace with compiled css
- * Bump django from 4.1.7 to 4.1.9
+- More vertical form styling tweaks

-v8.1.140 (2023-05-09)
-------------------------
- * Fix ticket padding
- * Remove remaining spa files
- * Add link to reset the latest credentials
- * Preset channel connection
+## v3.0.118

-v8.1.139 (2023-05-09)
-------------------------
- * Add blocked icon
+- Add flow link on subflow rulesets in flows

-v8.1.138 (2023-05-09)
-------------------------
- * Update labeling to use temba-checkbox and remove jQuery
- * Fix trim_channel_logs config and rework so task only runs for an hour max
- * Change test_db to create single org at a time
+## v3.0.117

-v8.1.137 (2023-05-09)
-------------------------
- * Add exclusions and params fields to FlowStart and start writing them
+- Fix styling on campaign event modal

-v8.1.136 (2023-05-09)
-------------------------
- * Don't include brand variables in less node
+## v3.0.116

-v8.1.135 (2023-05-09)
-------------------------
- * Remove references to old icon set
- * Remove unused jquery bits and intercooler
- * Remove bootstrap
+- Update to latest Raven
+- Make default form vertical, remove horizontal to vertical css overrides
+- Add flow run search and deletion
+- Hangup calls on channels release

-v8.1.134 (2023-05-08)
-------------------------
- * Remove no longer used perms
- * Remove any old non-spa templates not being extended by the spa version
- * Remove is_spa logic from templates
- * Remove old contact update fields views
+## v3.0.115

-v8.1.133 (2023-05-05)
-------------------------
- * Add default color
+- Allow message exports by label, system label or all messages
+- Fix for double stacked subflows with immediate exits

-v8.1.132 (2023-05-05)
-------------------------
- * Remove settings turd
+## v3.0.112

-v8.1.131 (2023-05-05)
-------------------------
- * Remove old nav from landing page
+- Archiving a flow should interrupt all the current runs

-v8.1.130 (2023-05-04)
-------------------------
- * Remove spa checking in views
+## v3.0.111

-v8.1.129 (2023-05-04)
-------------------------
- * Remove JSON view to list notifications now that it has moved to the internal API
- * Remove non-spa items from content menus
+- Display webhook results on contact history
+- Clean up template tags used on contact history
+- Allow broadcasts to be sent to all urns belonging to the specified contacts

-v8.1.128 (2023-05-03)
-------------------------
- * Fix contact import
+## v3.0.109

-v8.1.127 (2023-05-03)
-------------------------
- * Remove support for adding bulk sender delegate channels
- * Remove ability to create IVR delegates for android channels
- * Remove org home view altogether and update links to point to workspace view
+- Data migration to populate broadcast send_all field

-v8.1.126 (2023-05-03)
-------------------------
- * Change cookie checking for UI so that we always default to new UI
- * Add color picker widget
- * Remove ability to store twilio credentials on the org
+## v3.0.108

-v8.1.125 (2023-05-02)
-------------------------
- * Tweak notifications index to match API endpoint
- * Add new internal API with a notifications endpoint
- * Use DRF defaults for STRICT_JSON and UNICODE_JSON
- * Remove unused .api URL suffixes
+- Add webhook events trim task with configurable retain times for success and error logs

-v8.1.124 (2023-05-01)
-------------------------
- * Make contact.modify work with new and old format
- * Make ticket a reserved field name
+## v3.0.107

-v8.1.123 (2023-04-27)
-------------------------
- * Hide Open Ticket option on contact read page if there's already an open ticket
- * Rework soft and hard msg deleting to be more performant
+- Add send_all broadcast field

-v8.1.122 (2023-04-26)
-------------------------
- * Remove db constraints on Msg.flow and Msg.ticket
+## v3.0.106

-v8.1.121 (2023-04-26)
-------------------------
- * Tweak migration dependency
- * Show counts of tickets by topic on tickets menu
+- Remove non_atomic_gets and display message at /api/v1/ to explain API v1 has been replaced
+- Add squashable model for label counts
+- Split system label functionality into SystemLabel and SystemLabelCount

-v8.1.120 (2023-04-25)
-------------------------
- * Add topic counts to the API endpoint
- * Add undocumented param to contacts API endpoint which allows URNs to be expanded
- * Data migration to backfill ticket counts by topic
+## v3.0.105

-v8.1.119 (2023-04-25)
-------------------------
- * Start writing ticket counts for topics
+- Link subflow starts in actions
+- Allow wait to wait in flows with warning

-v8.1.118 (2023-04-24)
-------------------------
- * Fix deleting of flows and tickets which are referenced by messages
- * Fix pattern match for folder uuid
- * Stop writing TicketCount.assignee
+## v3.0.104

-v8.1.117 (2023-04-24)
-------------------------
- * Stop reading from TicketCount.assignee
+- Add new has email test, contains phrase test and contains only phrase test

-v8.1.116 (2023-04-21)
-------------------------
- * Add more channel icons
+## v3.0.103

-v8.1.115 (2023-04-21)
-------------------------
- * Update icons
- * Add ticket topic folders
+- Migration to populate FlowNodeCount shouldn't include test contacts

-v8.1.114 (2023-04-20)
-------------------------
- * Add migration to backfill TicketCount.scope
+## v3.0.102

-v8.1.113 (2023-04-20)
-------------------------
- * Add scope field to TicketCount and start writing
+- Add migration to populate FlowNodeCount

-v8.1.112 (2023-04-20)
-------------------------
- * Dropdowns for slow clickers
- * Tighten up animations
- * Use services for redis, elastic and postgres in CI
+## v3.0.101

-v8.1.111 (2023-04-18)
-------------------------
- * Fix and archive keyword triggers with no match_type
+- Migration to clear no-longer-used flow stats redis keys
+- Replace remaining cache-based flow stats code with trigger based FlowNodeCount

-v8.1.110 (2023-04-18)
-------------------------
- * Prefetch flows on message views and make titles consistent
+## v3.0.100

-v8.1.109 (2023-04-18)
-------------------------
- * Add links for menu, add flow badge, update label badges
- * Remove Chikka channel type which no longer exists
- * Update mailroom_db command to allow connecting to non-file socket postgres
+- Fix intermittently failing Twilio test
+- make sure calls have expiration on initiation
+- Update to latest smartmin
+- Add redirection for v1 endpoints
+- Fix webhook docs
+- Fix MsgCreateSerializer not using specified channel
+- Test coverage
+- Fix test coverage issues caused by removing API v1 tests
+- Ensure surveyor users still have access to the API v2 endpoints that they need
+- Remove djangorestframework-xml
+- Restrict API v1 access to surveyor users
+- Block all API v2 writes for suspended orgs
+- Remove all parts of API v1 not used by Surveyor

-v8.1.108 (2023-04-17)
-------------------------
- * Add ticket field to msg model
+## v3.0.99

-v8.1.107 (2023-04-13)
-------------------------
- * Allow deleting of groups used in triggers
+- Prioritize msg handling over timeouts and event fires
+- Remove hamlcompress command as deployments should use regular compress these days
+- Fix not correctly refreshing dynamic groups when a URN is removed
+- Allow searching for contacts _with any_ value for a given field

-v8.1.106 (2023-04-13)
-------------------------
- * Don't show topics on tickets until clicked
+## v3.0.98

-v8.1.105 (2023-04-12)
-------------------------
- * Fix js items on context menus
+- Fix sidebar nav LESS so that level2 lists don't have fixed height and separate scrolling
+- Unstop a contact when we get an explicit user interaction such as follow

-v8.1.104 (2023-04-11)
-------------------------
- * Do not display schedule events for archived triggers
- * Don't require db superuser for test_db command
- * Make ticket banner expandable
+## v3.0.96

-v8.1.103 (2023-04-10)
-------------------------
- * Fix urls when searching and paging
- * Follow message on auto assign for unassigned folder
+- Fix possible race condition between receiving and handling messages
+- Do away with scheme for USSD, will always be TEL
+- Make sure events are handled properly for USSD
+- Do not specify to & from when using reply_to
+- Update JunebugForm for editing Junebug Channel + config fields

-v8.1.102 (2023-04-10)
-------------------------
- * Add contact details pane, hide empty tabs
- * Auto assign tickets when sending messages
- * Add nicer ticket assignment using temba-contact-tickets component
- * Fix deleting of orgs with incidents
+## v3.0.95

-v8.1.101 (2023-04-06)
-------------------------
- * Add field search handler on tickets
+- Log request time on channel log success

-v8.1.100 (2023-04-06)
-------------------------
- * Add fields to tickets
+## v3.0.94

-v8.1.99 (2023-04-06)
-------------------------
- * Add test util to make it easier to mess with brands
- * Drop Org.stripe_customer_id
+- Fix test, fix template tags

-v8.1.98 (2023-04-06)
-------------------------
- * Link contact name on tickets to the contact page if permitted
- * Drop Org.plan, plan_start and plan_end
+## v3.0.93

-v8.1.97 (2023-04-05)
-------------------------
- * Pull tickets out of contact chat
- * Scheduled messages to broadcasts with compose widget
+- Change request times to be in ms instead of seconds

-v8.1.96 (2023-04-03)
-------------------------
- * Stop reading Org.plan and .plan_end
- * Bump redis from 4.5.3 to 4.5.4
+## v3.0.92

-v8.1.95 (2023-03-31)
-------------------------
- * Fix temba-store race on load
+- Block on handling incoming msgs so we don't process them forever away
+- Include Viber channels in new conversation trigger form channel choices

-v8.1.94 (2023-03-29)
-------------------------
- * Bump version of openpyxl
+## v3.0.90

-v8.1.93 (2023-03-29)
-------------------------
- * Update Excel reading dependencies
+- Don't use cache+calculations for flow segment counts - these are pre-calculated in FlowPathCount
+- Do not include active contacts in flows unless user overrides it
+- Clean up middleware imports and add tests
+- Feedback to user when simulating a USSD channel without a USSD channel connected

-v8.1.92 (2023-03-29)
-------------------------
- * Use unittests.mock.Mock in tests instead of custom mock_object
+## v3.0.89

-v8.1.91 (2023-03-28)
-------------------------
- * Upgrade redis library version
+- Expand base64 charset, fix decode validity heuristic

-v8.1.90 (2023-03-27)
-------------------------
- * NOOP instead of assert if archiving msg which is already archived etc
+## v3.0.88

-v8.1.89 (2023-03-27)
-------------------------
- * Do not fail to release channel when missing MTN subscription id in config
- * Add incident type for org suspension
+- Deal with Twilio arbitrarily sending messages as base64
+- Allow configuration of max text size via settings

-v8.1.88 (2023-03-23)
-------------------------
- * Fix suspending and unsuspending orgs so that it correctly updates children
- * Use a name for the active org that doesn't collide
+## v3.0.87

-v8.1.87 (2023-03-23)
-------------------------
- * Manually fix version number
+- Set higher priority when sending responses through Kannel

-v8.1.86 (2023-03-23)
-------------------------
- * Fix scrolling on WhatsApp templates page
+## v3.0.86

-v8.1.85 (2023-03-23)
-------------------------
- * Handle short screens better on run list page
+- Do not add stopped contacts to groups when importing
+- Fix an entire flow start batch failing if one run throws an exception
+- Limit images file size to be less than 500kB
+- Send Facebook message attachments in a different request than the text message
+- Include skuid for open range TransferTo accounts

-v8.1.84 (2023-03-22)
-------------------------
- * Update to coverage 7.x
+## v3.0.85

-v8.1.83 (2023-03-22)
-------------------------
- * Use onSpload to wire handlers on account form
+- Fix exception when handling Viber msg with no text
+- Migration to remove no longer used ContactGroup.count
+- Fix search queries like 'foo bar' where there are more than one condition on name/URN
+- Add indexes for Contact.name and ContactURN.path
+- Replace current omnibox search function with faster and simpler top-25-of-each-type approach

-v8.1.82 (2023-03-22)
-------------------------
- * Support setting and removing the subscription URL for MTN channels
+## v3.0.84

-v8.1.81 (2023-03-21)
-------------------------
- * Update ruff and isort
+- Fix Line, FCM icons, add Junebug icon

-v8.1.80 (2023-03-21)
-------------------------
- * Update black
+## v3.0.83

-v8.1.79 (2023-03-20)
-------------------------
- * Add mouseover text for temba-date
- * Reload page on org mismatch
- * Use embedded title instead of response header
+- Render missing field and URN values as "--" rather than "None" on Contact list page

-v8.1.78 (2023-03-20)
-------------------------
- * Add globals to new ui
- * Make it harder to accidentally delete an org
- * Rewrite org deletion test and fix deletion issues
+## v3.0.82

-v8.1.77 (2023-03-16)
-------------------------
- * Limit groups to a single line on contact page
+- Add ROLE_USSD
+- Add Junebug USSD Channel
+- Fix Vumi USSD to use USSD Role

-v8.1.76 (2023-03-16)
-------------------------
- * Remove unused fields and indexes on broadcast model
- * Reload page on version mismatch
- * Add support for MTN Developer Portal channel
+## v3.0.81

-v8.1.75 (2023-03-16)
-------------------------
- * Add menu path for org export and import
- * Fix legacy goto function for old UI
- * Warn users who go back to the old interface
- * Remove support for broadcasts with associated tickets
+- Archive triggers that do not have a contact to send to
+- Disable sending of messages for blocked and stopped contacts

-v8.1.74 (2023-03-15)
-------------------------
- * Show version number on public index page
- * Add poetry plugin to maintain version number in temba/__init__.py
- * Fix textinput inner scrolling
+## v3.0.80

-v8.1.73 (2023-03-15)
-------------------------
- * Stop returning type=flow|inbox on messages endpoint
- * Cleanup location app models
+- Add support for outbound media on reply messages for Twilio MMS (US, CA), Telegram, and Facebook
+- Do not throw when viber sends us message missing the media
+- Optimizations around Contact searching
+- Send flow UUID with webhook flow events

-v8.1.72 (2023-03-14)
-------------------------
- * Convert Org.config and Channel.config to be real JSON
+## v3.0.78

-v8.1.71 (2023-03-14)
-------------------------
- * Strip out invalid HTTP header characters from page title response headers
- * Fix mailroom db command to patch uuid generation after migrations are run
- * Expose flow on messages API endpoint
+- Allow configuration of max message length to split on for External channels

-v8.1.70 (2023-03-13)
-------------------------
- * Broad support for meta click for new tabs
- * Make Org.config and Channel.config non-null
+## v3.0.77

-v8.1.69 (2023-03-13)
-------------------------
- * Simplify use of config fields on channel update forms
- * Fix alias editor to use the new UI frame
- * Support updating Twilio credentials for T, TMS and TWA channels
+- Use brand key for evaluation instead of host when determining brand
+- Add red rabbit type (hidden since MT only)
+- Fix flow results exports for broadcast only flows

-v8.1.68 (2023-03-13)
-------------------------
- * Rework messages and broadcasts API endpoints to accept media object UUIDs as attachments
- * Make Msg.uuid and msg_type non-null
+## v3.0.76

-v8.1.67 (2023-03-10)
-------------------------
- * Fix layering for menu
+- Log Nexmo media responses without including entire body

-v8.1.66 (2023-03-09)
-------------------------
- * Fix initial editor load
- * Schedule message validation
+## v3.0.75

-v8.1.65 (2023-03-09)
-------------------------
- * Update endpoints for messages and media
+- Don't encode to UTF-8 for XML and JSON since they expect unicode
+- Optimize contact searching when used to determine single contact's membership
+- Use flow system user when migrating flows, avoid list page reorder after migrations

-v8.1.64 (2023-03-08)
-------------------------
- * Tweak layout for editor
- * Cleanup fail_old_messages task. Use correct statuses and return number failed.
+## v3.0.74

-v8.1.63 (2023-03-08)
-------------------------
- * Adjust export download page for new UI
- * Make media list page (still staff only) filter by org and add index
+- reduce number of lookups to DB

-v8.1.62 (2023-03-08)
-------------------------
- * Small z-index tweak
+## v3.0.73

-v8.1.61 (2023-03-07)
-------------------------
- * Tweak simulator placement in new ui
+- Add test case for search URL against empty field value
+- Fix sending vumi messages initiated from RapidPro without response to

-v8.1.60 (2023-03-07)
-------------------------
- * Encourage users to try the new interface
- * Add lightbox for contact history
+## v3.0.72

-v8.1.59 (2023-03-07)
-------------------------
- * Rework code depending on msg_type=I|F
+- Improvements to external channels to allow configuration against JSON and XML endpoints
+- Exclude test contacts from flow results
+- Update to latest smartmin to fix empty string searching

-v8.1.58 (2023-03-07)
-------------------------
- * Add missing channels migration
- * Use msg.created_by if set in ticket list view
- * Remove SMS type channel alerts
+## v3.0.70

-v8.1.57 (2023-03-06)
-------------------------
- * Move index on msg.external_id onto the model
+- Allow USSD flows to start someone else in a flow
+- Include reply to external_id for Vumi channel

-v8.1.56 (2023-03-06)
-------------------------
- * Fix soft deleting of scheduled messages so schedule is deleted too
- * Stop saving JSONAsTextField values as null for empty dicts and lists
- * Update select s3 usage for msg exports to not rely on type=inbox|flow
- * Add created_by to Msg and populate on events in contact histories
+## v3.0.69

-v8.1.55 (2023-03-02)
-------------------------
- * Fix import for sync fcm task
- * Create new filters and partial indexes for Inbox, Flows and Archived
+- Add ID column to result exports for anon orgs
+- Deactivate runs when releasing flows
+- Fix urn display for call log
+- Increased send and receive channel logging for Nexmo, Twilio, Twitter and Telegram
+- Allow payments through Bitcoins
+- Include TransferTo account currency when asking phone info to TransferTo
+- Don't create inbound messages for gather timeouts, letting calls expire
+- Don't show channel log for inactive channels on contact history
+- Upgrade to latest smartmin which changes created_on/modified_on fields on SmartModels to be overridable
+- Uniform call and message logs

-v8.1.54 (2023-03-02)
-------------------------
- * Fix enter on compose
+## v3.0.64

-v8.1.53 (2023-03-01)
-------------------------
- * Add compose component to contact chat
- * Pixel tweak on contact read page
- * Move more Android relayer code out of Channel
+- Add ID column to anonymous org contact exports, also add @contact.id field in message context
+- Fix counts for channel log elements
+- Only have one link on channel page for sending log
+- Attempt to determine file types for msg attachments using libmagic
+- Deactivate runs on hangups, keep IVR runs open on exit
+- Add log for nexmo media download
+- Add new perf_test command to run performance tests on database generated with make_test_db

-v8.1.52 (2023-03-01)
-------------------------
- * Simplify what we display for Android channels on read page
+## v3.0.62

-v8.1.50 (2023-02-28)
-------------------------
- * Make spload universal
+- Fix preferred channels for non-msg channels

-v8.1.49 (2023-02-28)
-------------------------
- * Make spload work on formax pages
+## v3.0.61

-v8.1.48 (2023-02-28)
-------------------------
- * Add more goto(event)
- * Fix content differing from page-load vs inline load
- * Add page title for spa response headers
- * Clean up subtitles on spa pages
- * Add link to flow starts (and clean up list page styling)
- * Add link for webhook calls (and cleanup styling here too)
- * Update styling for log pages for both old / new UI

-v8.1.47 (2023-02-27)
-------------------------
- * Be less clever with page titles. Fix label JS errors.
- * Make sure tests can run without making requests to external URLs
- * Unpublicize folder=incoming on messages API docs and re-add index with status=H
+- Make migrations to populate new export task fields non-atomic
+- Add indexes for admin boundaries and aliases
+- Nexmo: make sure calls are ended on hangup, log hangups and media
+- Fix inbound calls on Nexmo to use conversation_uuid
+- Style tweaks for Zapier widget
+- Use shorter timeout for IVR
+- Issue hangups on expiration during IVR runs
+- Catch all exceptions and log them when initiating call
+- Fix update status for Nexmo calls

-v8.1.46 (2023-02-23)
-------------------------
- * Fix external links in old UI

+## v3.0.48

-v8.1.45 (2023-02-23)
-------------------------
- * Fix external channel links
- * No longer intercept clicks in spa-content
- * Cleanup Channel model fields
- * Fix channel claim external URLs in new UI
+- Add channel session log page
+- Use brand variable for zaps to show
+- Additional logging for Nexmo
+- Increase non-overlap on timeout queueing, never double queue single timeout
+- Fix broken timeout handling when there is a race
+- Make field_keys a required parameter
+- Speed up contact imports by applying contact updates once after all the fields are set

-v8.1.44 (2023-02-23)
-------------------------
- * Exclude PENDING messages in contact history and API by org and contact
- * Add -id to msg fetch ordering in Contact.get_history
- * For both messages and tickets, replace the default indexes on org and contact with indexes that match the API ordering

+## v3.0.47

-v8.1.43 (2023-02-23)
-------------------------
- * Use statement level db trigger for broadcast msg counts
- * Update Django to 4.1.7
+- Add channel log for Nexmo call initiation
+- Fix import-geojson management command

-v8.1.42 (2023-02-22)
-------------------------
- * Only look at queued messages when syncing Android channels
- * Re-add Msg.STATUS_INITIALIZING to use for outgoing messages which fail to queue
- * Include STATUS_ERRORED messages in Outbox views

+## v3.0.46

-v8.1.41 (2023-02-22)
-------------------------
- * Remove spurious property
+- Fix Contact.search so it doesn't evaluate the base_query
+- Enable searching in groups and blocked/stopped contacts

-v8.1.40 (2023-02-22)
-------------------------
- * Fix contact imports in new UI
- * Fix menu refresh race
- * Remove window.lastFetch
- * Adjust menu paths for new UI channel views
- * Use SpaMixin on more channels extra views

+## v3.0.45

-v8.1.39 (2023-02-22)
-------------------------
- * Move Msg.update into android package
- * Make text optional on broadcasts endpoint (messages need text or attachments)

-v8.1.38 (2023-02-21)
-------------------------
- * Fix dashboard not loading when content
- * Fix handling FCM sync failure
+- Fix absolute positioning for account creation form
+- Add Line channel icon in fonts
+- Add data migrations to update org config to connect to Nexmo

+## v3.0.43

-v8.1.37 (2023-02-21)
-------------------------
- * Don't look up related fields in API if lookup value type is wrong
- * Update Django to 4.0.10
- * Fetching sent folder 
on messages endpoint should return messages ordered by -sent_on same as UI
- * Exclude unhandled messages from Incoming folder on messages API endpoint
- * More aggressive menu refreshing
- * Move much of the old android relayer code into its own package
- * Add media API endpoint, undocumented for now
- * Open up new UI access to everyone

-v8.1.36 (2023-02-20)
-------------------------
- * Cleanup use of validators in the API
- * Add support for Msg.TYPE_TEXT to be used (for now) for outgoing messages
+- Add Malawi as a country for Africa's Talking

-v8.1.35 (2023-02-17)
-------------------------
- * Add org start redirection view
- * Convert Attachment to be a dataclass
- * Rework msg write serializer to create a transient Msg instance that the read serializer can use without hitting the db
- * Add unpublicized API endpoint to send a single message
- * Add msg_send to mailroom client

+## v3.0.42

-v8.1.34 (2023-02-16)
-------------------------
- * Drop raw_urns field on Broadcast
- * Pass group id instead of uuid to contact_search mailroom endpoint
- * Remove unused expression_migrate from mailroom client
+- Widen pages to browser width so more can fit
+- Fix the display of URNs on contact list page
+- Fix searching of Nexmo number on connected accounts

-v8.1.33 (2023-02-15)
-------------------------
- * Fix routing of current workspace to settings
- * Add Broadcast.urns which matches the JSON and FlowStart.urns

+## v3.0.41

-v8.1.32 (2023-02-14)
-------------------------
- * Drop Broadcast.urns and .send_all
+- Fix channel countries being duplicated for airtime configuration
+- Add make_sql command to generate SQL files for an app, reorganize current SQL reference files
+- Add SquashableModel and use it for all squashable count classes

-v8.1.30 (2023-02-13)
-------------------------
- * Fix keyword triggers match type

+## v3.0.40

-v8.1.29 (2023-02-13)
-------------------------
- * Fix omnibox search for anon org to allow search by contact name
- * Prepare to drop Broadcast.send_all and .urns
+- Add support for Nexmo IVR
+- Log IVR interactions in Channel Log

-v8.1.27 (2023-02-10)
-------------------------
- * Move all form text from Trigger model to forms
- * Add migration to convert URNs to contacts on scheduled broadcasts

+## v3.0.37

-v8.1.26 (2023-02-10)
-------------------------
- * Remove returning specific URNs from omniboxes and instead match contacts by URN
- * Rework spa menu to eliminate mapping
+- Fix to make the label of an open-ended response be All Responses even if there is a timeout on the ruleset
+- Data migration to rename category for old Values collected with timeouts

-v8.1.25 (2023-02-09)
-------------------------
- * Remove support for unused v1 omnibox format
- * Update broadcasts API endpoint to support attachments

+## v3.0.36

-v8.1.24 (2023-02-08)
-------------------------
- * Update to latest cryptography library
- * Add task to interrupt flow sessions after 90 days
+- Add 256 keys to @extra, also enforce ordering so it is predictable which are included
+- Make fetching flow run stats more efficient and expose number of active runs on flow run endpoint
+- Migration to populate session on msg and ended_on where it is missing

-v8.1.23 (2023-02-06)
-------------------------
- * Fix flow results redirecting to its own page
- * Make sure WA numbers can only be claimed once

+## v3.0.35

-v8.1.22 (2023-02-06)
-------------------------
- * Update to latest Django to get security fix
+- Offline context per brand

-v8.1.21 (2023-02-06)
-------------------------
- * Fix 
export > import path on new UI
- * Fix login redirects from pjax calls

+## v3.0.34

-v8.1.20 (2023-02-02)
-------------------------
- * Add servicing menu on org read
+- Add Junebug channel type
+- Better base styling for dev project
+- Pass charset parameter to Kannel when sending Unicode
+- Zero out minutes, seconds, ms for campaign events with set delivery hours
+- Add other URN types to contact context, return '' if missing, '\*' mask for anon orgs
+- Make sure Campaigns export base_language for simple message events, honor on import

-v8.1.19 (2023-02-01)
-------------------------
- * Add Msg.quick_replies
- * Add Broadcast.query
- * More generic servicing for staff users

+## v3.0.33

-v8.1.18 (2023-02-01)
-------------------------
- * Drop unused Media.name field
+- Change Ansible command run on vagrant up from syncdb to migrate
+- Remove no longer needed django-modeltranslation
+- Keep up to 256 extra keys from webhooks instead of 128
+- Add documentation of API rate limiting

-v8.1.17 (2023-01-31)
-------------------------
- * Fix modax from menu bug

+## v3.0.32

-v8.1.15 (2023-01-30)
-------------------------
- * Add new org chooser with avatars in new UI
- * Add dashboard to menu in new UI
+- Make styling variables uniform across branding
+- Make brand styling optional

-v8.1.14 (2023-01-27)
-------------------------
- * Add ordering support for filters
- * Fix redirect ping pong when managing orgs
- * Tweak inspect_flows command to report spec version mismatches

+## v3.0.28

-v8.1.13 (2023-01-26)
-------------------------
- * Update flow editor
+- Add support for subflows over IVR

-v8.1.12 (2023-01-26)
-------------------------
- * Add locale field to Msg

+## v3.0.27

-v8.1.11 (2023-01-25)
-------------------------
- * Add migration to alter flow language field to first update any remaining flows with 'base'
+- Fix searching for Twilio numbers, add unit tests
+- Fix API v1 run serialization when step messages are purged

-v8.1.10 (2023-01-25)
-------------------------
- * Require flow and broadcast base languages to be 3 letters
- * Require broadcast.translations to be non-null

+## v3.0.26

-v8.1.9 (2023-01-25)
-------------------------
- * Drop unused broadcast fields
+- Add more substitutions from accented characters to GSM7 plain characters

-v8.1.8 (2023-01-24)
-------------------------
- * Make Broadcast.text nullable and stop writing it

+## v3.0.25

-v8.1.7 (2023-01-24)
-------------------------
- * Stop reading from Broadcast.text
+- Populate ended_on for IVR calls
+- Add session foreign key to Msg model

-v8.1.6 (2023-01-23)
-------------------------
- * Fix campaign imports so we don't import base as a language
- * Increase max-width for channel configuration page
- * Support Bandwidth channel type

+## v3.0.24

-v8.1.5 (2023-01-23)
-------------------------
- * Data migration to backfill broadcast.translations and replace base with und
+- Fix bug in starting calls from sessions

-v8.1.4 (2023-01-20)
-------------------------
- * Update campaign message events with language base
- * Make servicing use posterize

+## v3.0.23

-v8.1.3 (2023-01-19)
-------------------------
- * Tweak broadcasts API endpoint so it filters by is_active and hits index
- * Fix indexes used for tickets API endpoint
- * Remove unused indexes on contacts_contact
- * Bump engine version to 13.2
+- Remove flow from ChannelSession, sessions can span many runs/flows
+- Remove superfluous channelsession.parent

-v8.1.2 (2023-01-19)
-------------------------
- * Fixes for content menu changes
- * Fix 
test_db to create orgs with flow languages

+## v3.0.22

-v8.1.1 (2023-01-18)
-------------------------
- * Restrict creating surveyor flows unless that is enabled as a feature
- * Always create broadcasts with status = QUEUED, create index for fetching queued broadcasts
- * Add new translations JSON field to broadcasts and start writing it
- * Remove support for creating broadcasts with legacy expressions
- * New content menu component
+- Migration to update existing TwiML apps with a status_callback, remove api/v1 references

-v8.1.0 (2023-01-17)
-------------------------
- * Update contact import styling
- * Implement squashed migrations
- * Stop trimming flow starts as this will be handled by archiver

+## v3.0.21

-v8.0.1 (2023-01-12)
-------------------------
- * Tweak migration dependencies to ensure clean installs run them in an order that works
- * Add empty migrations required for squashing
+- Various tweaks to wording and presentation around custom SMTP email config

-v8.0.0 (2023-01-10)
-------------------------
- * Update deps

+## v3.0.20

-v7.5.149 (2023-01-10)
-------------------------
- * Drop FlowRunCount model
+- Allow orgs to set their own SMTP server for outgoing emails
+- Return better error message when To number not passed to Twilio handler
+- Exclude Flow webhook events from retries (we try once and forget)
+- Don't pass channel in webhook events if we don't know it
+- Use JsonResponse and response.json() consistently
+- Replace json.loads(response.content) with response.json() which properly decodes on Python 3

-v7.5.148 (2023-01-09)
-------------------------
- * Stop squashing FlowRunCount
- * Add missing index on FlowRunStatusCount and rework get_category_counts to be deterministic
- * Stop creating flows_flowruncount rows in db triggers and remove unsquashed index
- * Bump required pg_dump version for mailroom_db command to 14

+## v3.0.19

-v7.5.147 (2023-01-09)
-------------------------
- * Use und (Undetermined) as default flow language and add support for mul (Multiple)
- * Disallow empty and null flow languages, change default spec version to zero
- * Tweak migrate_flows to have smaller batch size and order by org to increase org assets cache hits
+- Improve performance of contact searches by location by fetching locations in separate query

-v7.5.146 (2023-01-05)
-------------------------
- * Cleanup migrate_flows command and stop excluding flows with version 11.12
- * Change sample flows language to eng
- * Refresh menu when tickets are updated
- * Fix frame-top analytics includes
- * Fix transparency issue with content menu on editor page

+## v3.0.18

-v7.5.145 (2023-01-04)
-------------------------
- * Update flow editor to include fix for no expiration route on IVR
- * Stop defaulting to base for new flow languages
+- Update pyparsing to 2.1.10
+- Update to new django-hamlpy
+- Display flow run exits on the contact timeline
+- Fix Travis settings file for Python 3
+- Fix more Python 3 syntax issues
+- Fix RecentMessages no longer supporting requests with multiple rules, and add tests for that
+- Use print as function rather than statement for future Python 3 compatibility
+- Do not populate contact name for anon orgs from Viber
+- Add is_squashed to FlowPathCount and FlowRunCount
+- Update to using boto3; if using AWS for storing imports or exports you'll need to change your settings file: `DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'`

-v7.5.144 (2023-01-04)
-------------------------
- * Ensure all orgs have at least one flow 
language
- * Switch to using temba-date in more places

+## v3.0.14

-v7.5.143 (2023-01-02)
-------------------------
- * Update mailroom version for CI
- * Tidy up org creation (signups and grants)
+- Allow for the creation of Facebook referral triggers (opt-in on FB)
+- Allow for whitelisting of domains for Facebook channels

-v7.5.142 (2022-12-16)
-------------------------
- * Fix org listing when org has no users left

+## v3.0.13

-v7.5.141 (2022-12-16)
-------------------------
- * Fix searching for orgs on manage list page
- * Fix Highcharts colors
- * Fix invalid template name
+- New contact field editing UI with Intercooler modals

+## v3.0.10

+- Log any exceptions encountered in Celery tasks to Raven
+- Tell user to get pages_messaging_subscriptions permission for their FB app

-v7.5.140 (2022-12-15)
-------------------------
- * Fix flow results page

+## v3.0.9

-v7.5.136 (2022-12-15)
-------------------------
- * Tell codecov to ignore static/
- * Switch label action buttons to use temba-dropdown
+- Update RecentMessages view to use new recent messages model
+- Remove now unused indexes on FlowStep

-v7.5.135 (2022-12-13)
-------------------------
- * Fix content menu display issues

+## v3.0.8

-v7.5.134 (2022-12-13)
-------------------------
- * Switch to yarn
+- Add data migration to populate FlowPathRecentStep from existing flow steps

-v7.5.133 (2022-12-12)
-------------------------
- * Bump required Python version to 3.10

+## v3.0.7

-v7.5.132 (2022-12-12)
-------------------------
- * Support Python 3.10
+- Introduce new model, FlowPathRecentStep, that tracks recent steps from one flow node to another. This will replace the rather expensive index used to show recent flow activity on a flow path.

-v7.5.131 (2022-12-09)
-------------------------
- * Replace .gauge on analytics backend with .gauges which allows backends to send gauge values in bulk
- * Remove Celery auto discovery for jiochat and wechat tasks which were removed

-v7.5.130 (2022-12-09)
-------------------------
- * Record cron time in analytics

+## v3.0.6

-v7.5.129 (2022-12-08)
-------------------------
- * Cleanup cron task names
- * Split task to trim starts and sessions into two separate tasks
- * Expose all status counts on flows endpoint
- * Read from FlowRunStatusCount instead of FlowRunCount
- * Track flow start counts in statement rather than row level trigger
+- Replace Unicode non-breaking spaces with a normal space for GSM7 encoding (Kannel only)
+- Add migrations for custom indexes (existing installs before v3 should fake these)

-v7.5.128 (2022-12-07)
-------------------------
- * Record cron task last stats in Redis
- * Switch from flake8 to ruff
- * Add data migration to convert exit_type counts to status counts

+## v3.0.5

-v7.5.127 (2022-12-07)
-------------------------
- * Fix counts for triggers on the menu
+- Fix styling on loader ball animation

-v7.5.126 (2022-12-06)
-------------------------
- * Add new count model for run statuses managed by by-statement db triggers

+## v3.0.4

-v7.5.125 (2022-12-05)
-------------------------
- * Tweak index used to find messages to retry so that it includes PENDING messages
+- Fix issue causing flow run table on flow dashboard to be very slow if a flow contained many responses

-v7.5.124 (2022-12-05)
-------------------------
- * Update to latest components
- * More updates for manage pages

+## v3.0.3

-v7.5.123 (2022-12-02)
-------------------------
- * Fix bulk labelling flows

-v7.5.122 (2022-12-02)
-------------------------
- * Add user read page
- 
* Latest components
- * Rework notification and incident types to function more like other typed things
- * Add org timezone to manage page
- * Remove no longer used group list view
- * Log Celery task completion by default and rework some tasks to return results included in the logging
- * Refresh browser on field deletion in legacy
- * Show org plan end as relative time
- * Don't show location field types as options on deploys where locations aren't enabled

-v7.5.121 (2022-11-30)
-------------------------
- * Fix loading of notification types
+- Refactor JSON responses to use native Django JSONResponse
+- Don't use proxy for Dart Media and Hub9, expose IPs to whitelist

-v7.5.120 (2022-11-30)
-------------------------
- * Rework notification types to work more like channel types
- * Update API fields endpoint to use name and type for writes as well as reads
- * Remove unused field on campaign events write serializer
- * Change undocumented pinned field on fields endpoint to be featured
- * Add usages field to fields API endpoint, as well as name and type to replace label and value_type
- * Add Line error reference URL

+## v3.0.2

-v7.5.119 (2022-11-29)
-------------------------
- * Fix flow label in list buttons
- * Fix editor StartSessionForm bug for definitions without exclusions
- * Remove no longer needed check for plan=parent
+- Fix DartMedia channel for short codes

-v7.5.118 (2022-11-28)
-------------------------
- * Add Telegram and Viber error reference URLs
- * Make Org.plan optional
- * Add support to create new workspaces from org chooser

+## v3.0.1

-v7.5.117 (2022-11-23)
-------------------------
- * Update to latest editor
- * Drop Org.is_multi_org and Org.is_multi_user which have been replaced by Org.features
+- Remove django-celery as it is unneeded, also stop saving Celery tombstones as we now store
+  all task state (ContactImport for example) directly in models

-v7.5.116 (2022-11-23)
-------------------------
- * Fix flow label name display

+## v3.0.0

-v7.5.115 (2022-11-22)
-------------------------
- * Default to no features on new child orgs
- * Add features field to org update UI

-v7.5.114 (2022-11-22)
-------------------------
- * Add Org.features and start writing it
- * Add error ref URL for FBA and IG
- * Update temba-components to get new link icon
- * Cleanup msg status constants
- * Always create new orgs with default plan and only show org_plan for non-child orgs

-v7.5.113
----------
- * Stop reading Label.label_type and make nullable
- * Remove all support for labels with parents

-v7.5.112
----------
- * Remove OrgActivity

-v7.5.111
----------
- * Delete associated exports when trying to delete message label folders

-v7.5.110
----------
- * Data migration to flatten msg labels

-v7.5.109
----------
- * Remove logic for which plan to use for a new org

-v7.5.108
----------
- * Tweak how get_new_org_plan is called
- * Move isort config to pyproject
- * Remove no longer used workspace plan

-v7.5.107
----------
- * Treat parent and workspace plans as equivalent

-v7.5.106
----------
- * Tweak flow label flatten migration to not allow new names to exceed 64 chars

-v7.5.105
----------
- * Display channel logs with earliest at top

-v7.5.104
----------
- * Remove customized 500 handler
- * Remove Sentry support
- * Data migration to flatten flow labels
- * Fix choice of brand for new orgs and move plan selection to classmethod
- * Catch corrupted CSV errors

-v7.5.103
----------
- * Some people don't care for icon constants
- 
Remove shim for browsers older than IE9
- * Remove Google Analytics settings

-v7.5.102
----------
- * Remove Google Analytics

-v7.5.101
----------
- * Fix Org.promote

-v7.5.100
----------
- * Add Org.promote utility method
- * Simplify determining whether to rate limit an API request by looking at request.auth
- * Data migration to simplify org hierarchies

-v7.5.99
----------
- * Rename security_settings.py > settings_security.py for consistency
- * Drop Org.uses_topups, TopUp, and Debit
- * Update to latest components
- * Remove unused settings
- * Remove TopUp, Debit and Org.uses_topups

-v7.5.98
----------
- * Drop triggers, indexes and functions related to topups

-v7.5.97
----------
- * Update mailroom_db command to use PostgreSQL 13
- * Remove User.get_org()
- * Always explicitly provide org when requesting a user API token
- * Remove Msg.topup, TopUpCredits, and CreditAlert
- * Test against latest Redis 6.2, Elastic 7.17.7 and Postgres 13 + 14

-v7.5.96
----------
- * Remove topup credits squash task from Celery beat

-v7.5.95
----------
- * Update API auth classes to set request.org and use that to set X-Temba-Org header
- * Use dropdown for brand field on org update form
- * Remove topups

-v7.5.94
----------
- * Add missing migration
- * Remove support for orgs with brand as the host
- * Remove brand tiers

-v7.5.93
----------
- * Fix new event modal listeners
- * Re-add org plan and plan end to update form
- * Add PNG of RapidPro logo
- * Update mailroom_db and test_db commands to set org brand as slug
- * Add data migration to convert org.brand to be the brand slug

-v7.5.92
----------
- * Create cla.yml
- * Rework branding to not require modifying what is in the settings

-v7.5.91
----------
- * Remove outdated contributor files

-v7.5.90
----------
- * Update flow editor
- * Remove unused fields from ChannelType
- * Allow non-beta users to add WeChat channels

-v7.5.89
----------
- * Properly truncate the channel name when claiming a WAC channel
- * Fix not saving selected date format to new child org
- * Add redirect from org_create_child if org has a parent
- * Remove unused Org.get_account_value
- * Don't allow creation of child orgs within child orgs
- * Remove low credit checking code

-v7.5.88
----------
- * Remove the token refresh tasks for jiochat and wechat channels as courier does this on demand
- * Remove Stripe and bundles functionality

-v7.5.87
----------
- * Remove unused Segment and Intercom dependencies
- * Remove unused utils code
- * Update TableExporter to prepare values so individual tasks don't have to
- * Update versions of mailroom etc that we use for testing
- * Add configurable group membership columns to message, ticket and results exports (WIP)

-v7.5.86
----------
- * Remove no-longer used credit alert email templates
- * Drop ChannelConnection

-v7.5.85
----------
- * Remove unschedule option from scheduled broadcast read page
- * Only show workspace children on settings menu
- * Allow adding Android channel when its number is used on a WhatsApp channel
- * Remove credit alert functionality
- * Add scheduled message delete modal

-v7.5.84
----------
- * No link fields on sub org page

-v7.5.83
----------
- * Update telegram library which doesn't work with Python 3.10
- * Add user child workspace management
- * Remove topup management views

-v7.5.82
----------
- * Add JustCall channel type

-v7.5.81
----------
- * Always show plan formax even for orgs on topups plan

-v7.5.80
----------
- * Remove 
task to suspend topups orgs

-v7.5.79
----------
- * Add new indexes for scheduled broadcasts view and API endpoint
- * Update broadcast_on_change db trigger to check is_active
- * Use database trigger to prevent status changes on flow sessions that go from exited to waiting

-v7.5.78
----------
- * Remove old Crisp templates
- * Add Broadcast.is_active backfill migration

-v7.5.77
----------
- * Proper redirect when removing channels
- * Fix API header when logged out
- * Take features out of branding and make it deployment level and remove api_link
- * Get rid of flow_types as a branding setting

-v7.5.76
----------
- * Tweak migration to convert missed call triggers to ignore archived triggers

-v7.5.75
----------
- * Add Broadcast.is_active and set null=true and default=true
- * Remove channel_status_processor context processor
- * Add data migration to delete or convert missed call triggers

-v7.5.74
----------
- * Fix webhook list page to not show every call as an error
- * Small styling tweaks for API docs
- * Remove fields from msgs event payloads that are no longer used

-v7.5.73
----------
- * Update API docs to be nav agnostic
- * Rewrite API Explorer to be vanilla JavaScript
- * Use single permissions for all msg and contact list views
- * Rework UI for incoming call triggers to allow selecting non-voice flows
- * Remove send action from messages, add download results for flows
- * Unload flow editor when navigating away

-v7.5.72
----------
- * Always put service menu options at end of menu in new group

-v7.5.71
----------
- * More appropriate login page, remove legacy TextIt code

-v7.5.70
----------
- * Fix which fields should be on org update modal
- * Honor brand config for signup

-v7.5.69
----------
- * Fix race on editor load

-v7.5.68
----------
- * Add failed reason for channel removed
- * Remove no longer used channels option from interrupt_sessions task

-v7.5.67
----------
- * Interrupt channel by mailroom task

-v7.5.66
----------
- * Remove need for jQuery on spa in-page loads
- * Remove key/secret hardcoding for boto session

-v7.5.65
----------
- * Queue relayer messages with channel UUID and id
- * No nouns for current object in menus except for New
- * Add common contact field inclusion to exports
- * Fix new scheduled message menu option
- * Fix releasing other archive files to use proper pagination

-v7.5.64
----------
- * Add an unlinked call list page
- * Show channel log links on more pages to more users

-v7.5.63
----------
- * Fix handling of relayer messages
- * Add missing email templates for ticket exports

-v7.5.62
----------
- * Add attachment_fetch as new channel log type

-v7.5.61
----------
- * Fix claiming Vonage channels for voice
- * Better approach for page titles from the menu
- * Fix layout for ticket menu in new UI

-v7.5.60
----------
- * Fix the flow results export modal

-v7.5.59
----------
- * Delete attachments from storage when deleting messages
- * Add base export class for exports with contact data
- * Actually make date range required for message exports (currently just required in UI)
- * Add date range filtering to ticket and results exports
- * Add ticket export (only in new UI for now)

-v7.5.58
----------
- * Add Twilio and Vonage connection formax entries in new UI
- * Update both main menu and content menus to align with new conventions
- * Gate new UI by Beta group rather than staff
- * Don't show new menu UIs until they're defined

-v7.5.57
----------
- * Move status 
updates into update contact view
- * Some tweaks to rendering of channel logs
- * Cleanup use of ChannelConnection in preparation for dropping

-v7.5.56
----------
- * Really really fix connection migration

-v7.5.55
----------
- * Really fix connection migration

-v7.5.54
----------
- * Fix migration to convert connections to calls

-v7.5.53
----------
- * Add data migration to convert channel connections to calls

-v7.5.52
----------
- * Replace last non-API usages of User.get_org()
- * Use new call model in UI

-v7.5.51
----------
- * Add new ivr.Call model to replace channels.ChannelConnection

-v7.5.50
----------
- * Drop no-longer used ChannelLog fields
- * Drop Msg.logs (replaced by .log_uuids)
- * Drop ChannelConnection.connection_type

-v7.5.49
----------
- * Fix test failing because Python version changed
- * Allow background flows for missed call triggers
- * Different show URL for spa and non-spa tickets
- * Update editor to include fix for localizing categories for some splits
- * Add data migration to delete existing missed call triggers for non-message flows
- * Restrict Missed Call triggers to messaging flows

-v7.5.48
----------
- * Stop recommending Android, always recommend Telegram
- * Drop IVRCall proxy model and use ChannelConnection consistently
- * Add migration to delete non-IVR channel connections
- * Fix bug in user releasing and remove special superuser handling in favor of uniform treatment of staff users

-v7.5.47
----------
- * Switch to temba-datepicker

-v7.5.46
----------
- * Fix new UI messages menu

-v7.5.45
----------
- * Replace some occurrences of User.get_org()
- * Add new create modal for scheduled broadcasts

-v7.5.44
----------
- * Add data migration to cleanup counts for SystemLabel=Calls
- * Tweak ordering of Msg menu sections
- * Add Slack channel

-v7.5.43
----------
- * Include config for mailroom test db channels
- * Remove Calls from msgs section
- * Update wording of Missed Call triggers to clarify they should only be used with Android channels
- * Only show Missed Call trigger as option for workspaces with an Android channel
- * Change ChannelType.is_available_to and is_recommended_to to include org

-v7.5.42
----------
- * Add data migration to delete legacy channel logs
- * Drop support for channel logs in legacy format

-v7.5.41
----------
- * Fix temba-store

-v7.5.40
----------
- * Tweak forgot password success message

-v7.5.39
----------
- * Add log_uuids field to ChannelConnection, ChannelEvent and Msg
- * Improve `trim_http_logs_task` performance by splitting the query

-v7.5.38
----------
- * Add codecov token to ci.yml
- * Remove unnecessary maxDiff set in tests
- * Fix to allow displaying logs that timed out
- * Add HttpLog util and use to save channel logs in new format
- * Add UUID to channel log and msgs

-v7.5.37
----------
- * Show servicing org

-v7.5.36
----------
- * Clean up chooser a smidge

-v7.5.35
----------
- * Add org-chooser
- * Refresh channel logs
- * Add channel UUID to call log URL
- * Fix history state on tickets and contacts
- * Update footer
- * Add download icons for archives
- * Fix create flow modal opener
- * Flow editor embed styling
- * Update copyright dates and TextIt name (dba of Nyaruka)

-v7.5.34
----------
- * Use elapsed_ms rather than request_time on channel log templates
- * Update components (custom widths for temba-dialog, use anon_display where possible)
- * Switch to temba-dialog based attachment viewer, remove previous libs
- * Nicer 
collapsing on flow list columns
- * Add overview charts for run results

-v7.5.33
----------
- * ChannelLogCRUDL.List should use get_description so that it works if log_type is set
- * Tweak channel log types to match what courier now creates
- * Check for tabs after timeouts, don't auto-collapse flows
- * Add charts to analytics tab

-v7.5.32
----------
- * Update components with label fix

-v7.5.31
----------
- * Add flow results in new UI

-v7.5.30
----------
- * Remove steps for adding WAC credit line to businesses

-v7.5.29
----------
- * Fix servicing of channel logs

-v7.5.28
----------
- * Stop writing to unused media name field
- * Add missing C Msg failed reason
- * Add anon-display field to API contact results if org is anon and make URN display null

-v7.5.27
----------
- * Revert change to Contact.Bulk_urn_cache_initialize to have it set org on contacts

-v7.5.26
----------
- * Don't set org on bulk initialized contacts

-v7.5.25
----------
- * Fix filtering on channel log call page
- * Add anon_display and use that when org is anon instead of using urn_display for anon id
- * Add urn_display to contact reference on serialized runs in API

-v7.5.24
----------
- * Fix missing service end button

-v7.5.23
----------
- * Update to latest floweditor
- * Add new ChannelLog log type choices and make description nullable
- * Fix more content menus so that they can be fetched as JSON and add more tests

-v7.5.22
----------
- * Remove unused policies.policy_read perm
- * Replace all permission checking against Customer Support group with is_staff check on user

-v7.5.21
----------
- * Allow views with ContentMenuMixin to be fetched as JSON menu items using a header
- * Add new fields to channel log model and start reading from them if they're set

-v7.5.20
----------
- * Update the links for Line developers console on the Line claim page
- * Rework channel log details views into one generic one, one for messages, one for calls

-v7.5.19
----------
- * Rework channel log rendering to use common HTTPLog template
- * Fix titles on channel, classifier and manage logins pages

-v7.5.18
----------
- * Workspace and user management in new UI

-v7.5.17
----------
- * Show send history of scheduled broadcasts in correct order
- * Only show option to delete runs to users who have that perm, and give editors that perm
- * Update deps

-v7.5.16
----------
- * Fix Zapier page title
- * Validate channel name is not more than 64 characters
- * Add 'authentication' to the temba anchor URL text

-v7.5.15
----------
- * Fix URL for media uploads which was previously conflicting with media directory

-v7.5.14
----------
- * Deprecate Media.name which can always be inferred from .path
- * Improve cleaning of media filenames
- * Convert legacy UUID fields on exports and labels
- * Request instagram_basic permission for IG channels

-v7.5.11
----------
- * Don't allow creating of labels with parents or editing labels to have a parent
- * Rework the undocumented media API endpoint to be more specific to surveyor attachments
- * Add MediaCRUDL with upload and list endpoints
- * Remove requiring instagram_basic permission

-v7.5.10
----------
- * Remove Media.is_ready, fix setting .status on alternates, add limit for upload size
- * Rework ContentMenuMixin to put the menu in the context, and include new and legacy formats

-v7.5.9
----------
- * Add status field to Media, move primary index to UUID field

-v7.5.8
----------
- * Update floweditor
- * Convert all 
views to use ContentMenuMixin instead of get_gear_links
- * Add decorator to mock UUID generation in tests
- * Process media uploads with ffmpeg in a Celery task

-v7.5.7
----------
- * Add constraint to ensure non-waiting/active runs have exited_on set
- * Add constraint to ensure non-waiting sessions have an ended_on

-v7.5.6
----------
- * Remove unused upload_recording endpoint
- * Add Media model

-v7.5.5
----------
- * Remaining fallback modax references
- * Add util for easier gear menu creation
- * Add option to interrupt a contact from read page

-v7.5.4
----------
- * Fix scripts on contact page start modal
- * Add logging for IG channel claim failures
- * Add features to BRANDING which determine whether brands have access to features
- * Sort permissions a-z
- * Fix related names on Flow.topics and Flow.users and add Topic.release
- * Expose opened_by and opened_in over ticket API

-v7.5.3
----------
- * Fix id for custom fields modal

-v7.5.2
----------
- * Fix typo on archive button
- * Only show active ticketers and topics on Open Ticket modal
- * Add data migration to fix non-waiting sessions with no ended_on

-v7.5.1
----------
- * Allow claiming WAC test numbers
- * Move black setting into pyproject.toml
- * Add Open Ticket modal view to contact read page

-v7.5.0
----------
- * Improve user list page
- * Add new fields to Ticket to record who or what flow opened a ticket
- * Refresh menu on modax redirects, omit excess listeners from legacy lists
- * Fix field label vs name in new UI
- * Add start flow bulk action in new UI
- * Show zeros in menu items in new UI
- * Add workspace selection to account page in new UI
- * Scroll main content pane up on page replacement in new UI

-v7.4.2
----------
- * Update copyright notice
- * Update stable versions

-v7.4.1
----------
- * Update locale files

-v7.4.0
----------
- * Remove superfluous Beta group perm
- * Update new UI opt-in permissions
- * More tweaks to WhatsApp Cloud channel claiming

-v7.3.79
----------
- * Add missing Facebook ID

-v7.3.78
----------
- * Add button to allow admin to choose more FB WAC numbers

-v7.3.77
----------
- * Add contact ticket list in new UI
- * Fix permissions to connect WAC
- * Register the WAC number in the activate method

-v7.3.76
----------
- * Add the Facebook dialog login if the token is not submitted successfully on WAC org connect
- * Fix campaigns archive and activate buttons
- * Update to latest Django
- * Only display WA templates that are active
- * Update flow start dialog to use start preview endpoint
- * Add start flow bulk action for contacts

-v7.3.75
----------
- * Redirect to channel page after WAC claim
- * Fix org update pre form users roles list
- * Adjust permission for org WhatsApp connect view
- * Ignore new conversation triggers without channels in imports

-v7.3.74
----------
- * Use FB JS SDK for WAC signups

-v7.3.73
----------
- * Add DB constraint to disallow active or waiting runs without a session

-v7.3.72
----------
- * Add DB constraint to enforce that flow sessions always have output or output_url

-v7.3.71
----------
- * Make sure all limits are updatable on the workspace update view
- * Remove duplicated pagination
- * Enforce channels limit per workspace

-v7.3.70
----------
- * Fix workspace group limit check for existing group import
- * Drop no longer used role m2ms

-v7.3.69
----------
- * Fix campaign links

-v7.3.68
----------
- * Add WhatsApp API version choice field
- * Stop writing to the role 
specific m2m tables
- * Add pending events tab to contact details

-v7.3.67
----------
- * Merge pull request #3865 from nyaruka/plivo_claim
- * Formatting
- * Sanitize Plivo app names to match new rules

-v7.3.66
----------
- * Merge pull request #3864 from nyaruka/fix-WA-templates
- * Fix message templates syncing for new categories

-v7.3.65
----------
- * Fix surveyor joins so new users are added to OrgMembership as well

-v7.3.64
----------
- * Fix fetching org users with given roles

-v7.3.63
----------
- * Update mailroom_db command to correctly add users to orgs
- * Stop reading from org role m2m tables

-v7.3.62
----------
- * Fix rendering of dates on upcoming events list
- * Data migration to backfill OrgMembership

-v7.3.61
----------
- * Add missing migration

-v7.3.60
----------
- * Data migration to fail active/waiting runs with no session
- * Include scheduled triggers in upcoming contact events
- * Add OrgMembership model

-v7.3.59
----------
- * Spreadsheet layout for contact fields in new UI
- * Adjust WAC channel claim to add system admin with user token

-v7.3.58
----------
- * Clean up chat media treatment
- * Add endpoint to get upcoming scheduled events for a contact
- * Remove filtering by ticketer on tickets API endpoint and add indexes
- * Add status to contacts API endpoint

-v7.3.57
----------
- * Improve WAC phone number verification flow and feedback
- * Adjust name of WAC channels to include the number
- * Fix manage user update URL on org update page
- * Support missing target_ids key in WAC responses

-v7.3.56
----------
- * Fix deletion of users
- * Cleanup user update form
- * Fix missing users manage link page
- * Add views to verify and register a WAC number

-v7.3.55
----------
- * Update contact search summary encoding

-v7.3.54
----------
- * Make channel type a property and use it to determine redact values in HTTP request logs

-v7.3.53
----------
- * Make WAC channel visible to beta group

-v7.3.52
----------
- * Fix field name for submitted token

-v7.3.51
----------
- * Use default API throttle rates for unauthenticated users
- * Bump pyjwt from 2.3.0 to 2.4.0
- * Cache user role on org
- * Add WhatsApp Cloud channel type

-v7.3.50
----------
- * Make Twitter channels beta only for now
- * Use cached role permissions for permission checking and fix incorrect permissions on some API views
- * Move remaining monkey patched methods on auth.User to orgs.User

-v7.3.49
----------
- * Timings in export stats spreadsheet should be rounded to nearest second
- * Include failed_reason/failed_reason_display on msg_created events
- * Move more monkey patching on auth.User to orgs.User

-v7.3.48
----------
- * Include first reply timings in ticket stats export
- * Create a proxy model for User and start moving some of the monkey patching to proper methods on that

-v7.3.47
----------
- * Data migration to backfill ticket first reply timings

-v7.3.46
----------
- * Add new squashable model to track average ticket reply times and close times
- * Add Ticket.replied_on

-v7.3.45
----------
- * Add endpoint to export Excel sheet of ticket daily counts for last 90 days

-v7.3.44
----------
- * Remove omnibox support for fetching by label and message
- * Remove functionality for creating new label folders and creating labels with folders

-v7.3.43
----------
- * Fix generating cloned flow names so they can't end with trailing spaces
- * Deleting of globals should be soft like other types
- * Simplify checking of 
workspace limits in UI and API

-v7.3.42
----------
- * Data migration to backfill ticket daily counts

-v7.3.41
----------
- * Reorganization of temba.utils.models
- * Update the approach to testing that a token is valid for FBA and IG channels
- * Promote ContactField and Global to be TembaModels whilst for now retaining their custom name validation logic
- * Add import support methods to TembaModel and use with Topic

-v7.3.40
----------
- * Add workspace plan, disallow grandchild org creation
- * Add support for shared usage tracking

-v7.3.39
----------
- * Move temba.utils.models to its own package
- * Queue broadcasts to mailroom with their created_by
- * Add teams to mailroom test database
- * Add is_system to TembaModel, downgrade Contact to SmartModel

-v7.3.38
----------
- * Make sure we request a FB long-lived page token using a long-lived user token
- * Convert campaign and campaign event to use real UUIDs, simplify use of constants in API

-v7.3.37
----------
- * Don't forget to squash TicketDailyCount
- * Fix imports of flows with ticket topic dependencies

-v7.3.36
----------
- * Add migration to update names of deleted labels and add constraint to enforce uniqueness
- * Move org limit checking from serializers to API views
- * Generalize preventing deletion of system objects via the API and allow deleting of groups that are used in flows
- * Serialized topics in the API should include system field
- * Add name uniqueness constraints to Team and Topic
- * Add Team and TicketDailyCount models

-v7.3.35
----------
- * Tweaks to Topic model to enforce name uniqueness
- * Add __str__ and __repr__ to TembaModel to replace custom methods and remove several unused ones
- * Convert FlowLabel to be a TembaModel

-v7.3.34
----------
- * Fix copying flows to generate a unique name
- * Rework TembaModel to be a base model class with UUID and name

-v7.3.33
----------
- * Use model mixin for common name functionality across models

-v7.3.32
----------
- * Add DB constraint to enforce flow name uniqueness

-v7.3.31
----------
- * Update components with resolved locked file

-v7.3.29
----------
- * Fix for flatpickr issue breaking date picker
- * ContactField.get_or_create should enforce name uniqueness and ignore invalid names
- * Add validation error when changing type of field used by campaign events

-v7.3.28
----------
- * Tweak flow name uniqueness migration to honor max flow name length

-v7.3.27
----------
- * Tweak header to be uniform treatment regardless of menu
- * Data migration to make flow names unique
- * Add flow.preview_start endpoint which calls mailroom endpoint

-v7.3.26
----------
- * Fix mailroom_db command to set languages on new orgs
- * Fix inline menus when they have no children
- * Fix message exports

-v7.3.25
----------
- * Fix modals on spa pages
- * Add service button to org edit page
- * Update to latest Django
- * Add flow name to message export if we have it

-v7.3.24
----------
- * Allow creating channel with same address when schemes do not overlap

-v7.3.23
----------
- * Add status to list of reserved field keys
- * Migration to drop ContactField.label and field_type

-v7.3.22
----------
- * Update contact modified_on when deleting a group they belong to
- * Add custom name validator and use for groups and flows

-v7.3.21
----------
- * Fix rendering of field names on contact read page
- * Stop writing ContactField.label and field_type

-v7.3.20
----------
- * Stop reading ContactField.label and field_type
- 
-v7.3.19
----------
- * Correctly set new ContactField fields in mailroom_db test_db commands
- * Update version of codecov action as well as versions of rp-indexer and mailroom used by tests
- * Data migration to populate name and is_system on ContactField

-v7.3.18
----------
- * Give contact fields a name and is_system db field
- * Update list of reserved keys for contact fields

-v7.3.17
----------
- * Fix uploading attachments to properly get uploaded URL

-v7.3.16
----------
- * Fix generating of unique flow, group and campaign names to respect case-insensitivity and max name length
- * Add data migration to prefix names of previously deleted flows
- * Prefix flow names with a UUID when deleted so they don't conflict with other flow names
- * Remove warning about feature on flow start modal being removed

-v7.3.15
----------
- * Check name uniqueness on flow creation and updating
- * Cleanup existing field validation on flow and group forms
- * Do not fail to release a channel when we cannot reach the Facebook API for FB channels

-v7.3.14
----------
- * Convert flows to be a soft dependency

-v7.3.13
----------
- * Replace default index on FlowRun.contact with one that includes flow_id

-v7.3.12
----------
- * Data migration to give every workspace an Open Tickets smart system group

-v7.3.11
----------
- * Fix bulk adding/removing to groups from contact list pages
- * Convert groups into a soft dependency for flows
- * Use dataclasses instead of NamedTuples where appropriate

-v7.3.10
----------
- * Remove path from example result in runs API endpoint docs
- * Prevent updating or deleting of system groups via the API or UI
- * Add system property to groups endpoint and fix docs

-v7.3.9
----------
- * Remove IG channel beta gating

-v7.3.8
----------
- * Fix fetching of groups from API when using separate readonly DB connection

-v7.3.7
----------
- * Rework how we fetch contact groups

-v7.3.6
----------
- * For FB / IG claim pages use expiring token if no long-lived token is provided

-v7.3.5
----------
- * Data migration to update group_type=U to M|Q

-v7.3.4
----------
- * Merge pull request #3734 from nyaruka/FB-IG-claim

-v7.3.3
----------
- * Check all org groups when creating unique group names
- * Make ContactGroup.is_system non-null and switch to using it to distinguish between system and user groups

-v7.3.2
----------
- * Data migration to populate ContactGroup.is_system

-v7.3.1
----------
- * Add is_system field to ContactGroup and rename 'dynamic' to 'smart'
- * Return 404 from edit_sub_org if org doesn't exist
- * Use live JS SDK for FBA and IG refresh token views
- * Add scheme to flow results exports

-v7.3.0
----------
- * Add countries supported by Africa's Talking
- * Replace empty squashed migrations with real ones

-v7.2.4
----------
- * Update stable versions in README

-v7.2.3
----------
- * Add empty versions of squashed migrations to be implemented in 7.3

-v7.2.2
----------
- * Update translations from Transifex
- * Fix searching on calls list page

-v7.2.1
----------
- * Update locale files

-v7.2.0
----------
- * Disallow PO export/import for archived flows because mailroom doesn't know about them
- * Add campaigns section to new UI

-v7.1.82
----------
- * Update to latest flake8, black and isort

-v7.1.81
----------
- * Remove unused collect_metrics_task
- * Bump dependencies

-v7.1.80
----------
- * Remove progress bar on Facebook claim
- * Replace old indexes based on flows_flowrun.is_active
- 
-v7.1.79
----------
- * Remove progress dots for FBA and IG channel claim pages
- * Actually drop exit_type, is_active and delete_reason on FlowRun
- * Fix group name validation to include system groups

-v7.1.78
----------
- * Test with latest indexer and mailroom
- * Stop using FlowRun.exit_type, is_active and delete_reason

-v7.1.77
----------
- * Tweak migration as Postgres won't let us drop a function being used

-v7.1.76
----------
- * Update deprecated Vonage methods

-v7.1.75
----------
- * Rework flowrun db triggers to use status rather than exit_type or is_active

-v7.1.74
----------
- * Allow archiving of flow messages
- * Don't try interrupting a session that is about to be deleted
- * Tweak criteria for who can preview new interface

-v7.1.73
----------
- * Data migration to fix Facebook contact names

-v7.1.72
----------
- * Revert database trigger changes which stopped deleting path and exit_type counts on flowrun deletion

-v7.1.71
----------
- * Fix race condition in contact deletion
- * Rework flowrun database triggers to look at delete_from_results instead of delete_reason

-v7.1.69
----------
- * Update to latest floweditor

-v7.1.68
----------
- * Add FlowRun.delete_from_results to replace delete_reason

-v7.1.67
----------
- * Drop no longer used Msg.delete_reason and delete_from_counts columns
- * Update to Facebook Graph API v12

-v7.1.66
----------
- * Fix last reference to Msg.delete_reason in db triggers and stop writing that on deletion

-v7.1.65
----------
- * Rework msgs database triggers so we don't track counts for messages in archives

-v7.1.64
----------
- * API rate limits should be org scoped except for staff accounts
- * Expose current flow on contact read page for all users
- * Add deprecation text for restart_participants

-v7.1.63
----------
- * Fix documentation of contacts API endpoint
- * Release URN channel events in data migration to fix deleted contacts with tickets
- * Use original filename inside UUID folder to upload media files

-v7.1.62
----------
- * Tweak migration to only fully delete inactive contacts with tickets

-v7.1.61
----------
- * Add flow field to contacts API endpoint
- * Add support to the audit_es command for dumping ES queries
- * Add migration to make sure contacts which we failed to delete are really deleted
- * Fix contact release with tickets having a broadcast

-v7.1.60
----------
- * Adjust WA message template warning to not be shown for Twilio WhatsApp channels
- * Add support to increase API rates per org

-v7.1.59
----------
- * Add migration to populate Contact.current_flow

-v7.1.58
----------
- * Restrict msg visibility changes on bulk actions endpoint

-v7.1.57
----------
- * Add Sentry ID for 500 page
- * Display current flow on contact read page for beta users
- * Add new msg visibility for msgs deleted by senders and allow deleted msgs to appear redacted in contact histories
- * Contact imports should strip rows that are empty or missing a UUID or URNs

-v7.1.56
----------
- * Fix issue with sending to step_node
- * Add missing languages for WhatsApp templates
- * Add migration to remove inactive contacts from user groups

-v7.1.55
----------
- * Fix horizontal scrolling in editor
- * Add support to undo_footgun command to revert status changes

-v7.1.53
----------
- * Relayer syncing should ignore bad URNs that fail validation in mailroom
- * Add unique constraint to ContactGroup to enforce name uniqueness within an org

-v7.1.52
----------
- * Fix scrolling select
- 
-v7.1.51
----------
- * Merge pull request #3671 from nyaruka/ui-widget-fixes
- * Fix select for slow clicks and removing rules in the editor

-v7.1.50
----------
- * Add migration to make contact group names unique within an organization
- * Add cookie-based path to opt in and out of new interface

-v7.1.49
----------
- * Update to Django 4

-v7.1.48
----------
- * Make IG channel beta gated
- * Remove expires_on, parent_uuid and connection_id fields from FlowRun
- * Add background flow options to campaign event dialog

-v7.1.47
----------
- * Make FlowSession.wait_resume_on_expire not-null

-v7.1.46
----------
- * Add migration to set wait_resume_on_expire on flow sessions
- * Update task used to update run expirations to also update them on the session

-v7.1.45
----------
- * Make FlowSession.status non-null and add constraint to ensure waiting sessions have wait_started_on and wait_expires_on set

-v7.1.44
----------
- * Fix login via password managers
- * Change Gujarati language code to 'guj'
- * Add Instagram channel type
- * Add interstitial when inactive contact search meets threshold

-v7.1.42
----------
- * Add missing migration

-v7.1.41
----------
- * Add Contact.current_flow

-v7.1.40
----------
- * Drop FlowRun.events and FlowPathRecentRun

-v7.1.39
----------
- * Include qrious.js script
- * Add FlowSession.wait_resume_on_expire
- * Add Msg.flow

-v7.1.38
----------
- * Replace uses of deprecated Django functions
- * Remove Crisp and Librato analytics backends and add ConsoleBackend as example
- * Data migration to populate FlowSession.wait_started_on and wait_expires_on

-v7.1.37
----------
- * Migration to remove recent run creation from db triggers
- * Remove no longer used recent messages view and functionality on FlowPathRecentRun

-v7.1.36
----------
- * Add scheme column on contact exports for anon orgs
- * Remove option to include router arguments in downloaded PO files
- * Make loading of analytics backends dynamic based on setting of backend class paths

-v7.1.35
----------
- * Only display Crisp support widget if brand supports it
- * Do Crisp chat widget embedding via analytics template hook

-v7.1.34
----------
- * Update to editor v1.16.1

-v7.1.33
----------
- * Add management command to fix broken flows
- * Use new recent contacts endpoint for editor

-v7.1.32
----------
- * Temporarily put crisp_website_id back in context

-v7.1.31
----------
- * Remove include_msgs option of flow result exports

-v7.1.30
----------
- * Update to latest flow editor

-v7.1.29
----------
- * Update to latest floweditor
- * Add FlowSession.wait_expires_on
- * Improve validation of flow expires values
- * Remove Segment and Intercom integrations and rework Librato and Crisp into a pluggable analytics framework

-v7.1.28
----------
- * Convert FlowRun.id and FlowSession.id to BIGINT

-v7.1.27
----------
- * Drop no longer used FlowRun.parent

-v7.1.26
----------
- * Prefer UTF-8 if we're not sure about encoding of CSV import

-v7.1.25
----------
- * Fix Kaleyra claim blurb
- * Fix HTTPLog read page showing warning shading for healthy calls

-v7.1.24
----------
- * Fix Crisp identify on signup
- * Use same event structure for Crisp as others

-v7.1.23
----------
- * Update help links for the editor
- * Add failed reason for failed destination such as missing channel or URNs
- * Add view to fetch recent contacts from Redis

-v7.1.22
----------
- * Fix join syntax

-v7.1.21
----------
- * Fix join syntax, argh

-v7.1.20
----------
- * Arrays not allowed on track events

-v7.1.19
----------
- * Add missing env to settings_common

-v7.1.18
----------
- * Implement Crisp as an analytics integration

-v7.1.17
----------
- * Tweak event tracking for results exports
- * Revert change to hide non-responded runs in UI

-v7.1.16
----------
- * Drop Msg.response_to
- * Drop Msg.connection_id

-v7.1.15
----------
- * Remove path field from API runs endpoint docs
- * Hide options to include non-responded runs on results download modal and results page
- * Fix welcome page widths
- * Update mailroom_db to require pg_dump version 12.*
- * Update temba-components
- * Add workspace page to new UI

-v7.1.14
----------
- * Fix wrap for recipients list on flow start log
- * Set Msg.delete_from_counts when releasing a msg
- * Msg.fail_old_messages should set failed_reason
- * Add new fields to Msg: delete_from_counts, failed_reason, response_to_external_id
- * Tweak msg_dewire command to only fetch messages which have never errored

-v7.1.13
----------
- * Add management command to dewire messages based on a file of IDs
- * Render webhook calls which are too slow as errors

-v7.1.12
----------
- * Remove last of msg sending code
- * Fix link to webhook log

-v7.1.11
----------
- * Remove unnecessary conditional load of jQuery

-v7.1.10
----------
- * Make forgot password email look a little nicer and be easier to localize

-v7.1.9
----------
- * Fix email template for forgotten passwords

-v7.1.8
----------
- * Remove Chatbase as an integration as it no longer exists
- * Clear keyword triggers when switching to flow type that doesn't support them
- * Use branded emails for export notifications

-v7.1.5
----------
- * Remove warning on flow start modal about settings changes
- * Add privacy policy link
- * Test with Redis 3.2.4
- * Updates for label sub menu and internal menu navigation

-v7.1.4
----------
- * Remove task to retry errored messages which is now handled in mailroom

-v7.1.2
----------
- * Update Poetry dependencies
- * Update to latest editor

+- IMPORTANT: This release resets all Temba migrations. 
-v7.1.1
-----------
- * Remove channel alert notifications as these will become incidents
- * Add Incident model as well as OrgFlagged and WebhooksUnhealthy types
-
-v7.1.0
-----------
- * Drop no longer used index on msg UUID
- * Re-run collect_sql
- * Use std collection types for typing hints and drop use of object in classes
-
-v7.0.4
-----------
- * Fix contact stop list page
- * Update to latest black to fix errors on Python 3.9.8
- * Add missing migration
-
-v7.0.3
-----------
- * Update to latest editor v1.15.1
- * Update locale files which adds cs and mn
-
-v7.0.2
-----------
- * Update editor to v1.15 with validation fixes
- * Fix outbox pagination
- * Add generic title bar with new dropdown on spa
-
-v7.0.1
-----------
- * Add missing JS function to delete messages in the archived folder
- * Update locale files
-
-v7.0.0
-----------
- * Fix test failing due to bad domain lookup
-
-v6.5.71
-----------
- * Add migration to remove deleted contacts and groups from scheduled broadcasts
- * Releasing a contact or group should also remove it from scheduled broadcasts
-
-v6.5.70
-----------
- * Fix intermittent credit test failure
- * Tidy up Msg and Broadcast constants
- * Simplify settings for org limit defaults
- * Fix rendering of deleted contacts and groups in recipient lists
-
-v6.5.69
-----------
- * Remove extra labels on contact fields
-
-v6.5.68
-----------
- * Re-enable chat monitoring
-
-v6.5.67
-----------
- * Keep ticket views and components in sync
-
-v6.5.66
-----------
- * Add channel menu
- * Add test for dynamic contact group list, remove editor_next redirect
- * Fix styling on contact list headers and flow embedding
- * Add messages to menu, refresh override
- * Switch contact fields and import to use template inheritance
- * Use template inheritance for spa work
- * Add deeplinking support for non-menued destinations
-
-v6.5.65
-----------
- * Move to Python 3.9
-
-v6.5.64
-----------
- * Fix export notification email links
-
-v6.5.63
-----------
- * When a contact is released their tickets should be deleted
- * Test on PG 12 and 13
- * Use S3 Select for message exports
- * Use new notifications system for export emails
-
-v6.5.62
-----------
- * Use crontab for WA tokens task schedule
- * Allow keyword triggers to be single emojis
- * Celery 5.x
-
-v6.5.60
-----------
- * Add option to audit_archives to check flow run counts
- * Drop no longer used ticket subject column
- * Add contact read page based on contact chat component
-
-v6.5.59
-----------
- * Fewer progress updates in audit_archives
- * Tweak tickets API endpoint to accept a uuid URL param
-
-v6.5.58
-----------
- * Add progress feedback to audit_archives
- * Update locale files
-
-v6.5.57
-----------
- * Fix Archive.rewrite
-
-v6.5.56
-----------
- * Encode content hashes sent to S3 using Base64
-
-v6.5.55
-----------
- * Trim mailgun ticketer names to <= 64 chars when creating
- * Management command to audit archives
- * Use field limiting on omnibox searches
-
-v6.5.54
-----------
- * Fix S3 select query generation for date fields
-
-v6.5.53
-----------
- * Disable all sentry transactions
- * Use S3 select for flow result exports
- * Add utils for compiling S3 select queries
-
-v6.5.52
-----------
- * Merge pull request #3555 from nyaruka/ticket-att
- * Update test to include attachment list for last_msg
- * Update CHANGELOG.md for v6.5.51
- * Merge pull request #3553 from nyaruka/httplog_tweaks
- * Merge pull request #3554 from nyaruka/s3_retries
- * Add other missing migration
- * Add retry config to S3 client
- * Add missing migration to drop WebhookResult model
- * Update CHANGELOG.md for v6.5.50
- * Merge pull request #3552 from nyaruka/fix-WA-check-health-logs
- * Fix tests
- * Add zero defaults to HTTPLog fields, drop WebHookResult and tweak HTTPLog templates for consistency
- * Fix response for WA message template to be HTTP response
- * Update CHANGELOG.md for v6.5.49
- * Merge pull request #3549 from nyaruka/retention_periods
- * Merge pull request #3546 from nyaruka/readonly_exports
- * Merge pull request #3548 from nyaruka/fix-WA-check-health-logs
- * Merge pull request #3550 from nyaruka/truncate-org
- * Use single retention period setting for all channel logs
- * Truncate org name with ellipsis on org chooser
- * Add new setting for retention periods for different types and make trimming tasks more consistent
- * Use readonly database connection for contact, message and results exports
- * Add migration file
- * Log update WA status error using HTTPLog
-
-v6.5.51
-----------
- * Add retry config to S3 client
- * Add zero defaults to HTTPLog fields, drop WebHookResult and tweak HTTPLog templates for consistency
-
-v6.5.50
-----------
- * Fix response for WA message template to be HTTP response
-
-v6.5.49
-----------
- * Truncate org name with ellipsis on org chooser
- * Add new setting for retention periods for different types and make trimming tasks more consistent
- * Use readonly database connection for contact, message and results exports
- * Log update WA status error using HTTPLog
-
-v6.5.48
-----------
- * Fix clear contact field event on ticket history
-
-v6.5.47
-----------
- * Use readonly database connection for contacts API endpoint
- * Use webhook_called events from sessions for contact history
- * Remove unused webhook result views and improve httplog read view
- * Fix API endpoints not always using readonly database connection and add testing
-
-v6.5.46
-----------
- * Move list refresh registration out of content block
-
-v6.5.45
-----------
- * Temporarily disable refresh
- * Don't use readonly database connection for GETs to contacts endpoint
- * Add view for webhook calls saved as HTTP logs
- * Pass location support flag to editor as a feature flag
-
-v6.5.44
-----------
- * GET requests to API should use readonly database on the view's queryset
-
-v6.5.43
-----------
- * Tweak how HTTP logs are deleted
- * Add num_retries field to HTTPLog
-
-v6.5.42
-----------
- * Pin openpyxl to 3.0.7 until 3.0.8 release problems resolved
- * Add new fields to HTTPLog to support saving webhook results
- * Make TPS for Shaqodoon be 5 by default
- * Make location support optional via new branding setting
-
-v6.5.41
-----------
- * Update editor with fix for field creation
- * Minor tidying of HTTPLog
- * Fix rendering of tickets on contact read page which now don't have subjects
-
-v6.5.40
-----------
- * Update to floweditor 1.14.2
- * Tweak database settings to add new readonly connection and remove no longer used direct connection
- * Update menu on ticket list update
-
-v6.5.38
-----------
- * Deprecate subjects on tickets in favor of topics
- * Tweak ticket bulk action endpoint to allow unassigning
- * Add API endpoint to read and write ticket topics
-
-v6.5.37
-----------
- * Add tracking of unseen notification counts for users
- * Clear ticket notifications when visiting appropriate ticket views
- * Remove no longer used Log model
-
-v6.5.36
-----------
- * Revert cryptography update
-
-v6.5.35
-----------
- * Update to newer pycountry and bump other minor versions
- * Fix ticketer HTTP logs not being accessible
- * Add management command to re-eval a smart group
- * Add comment to event_fires about mailroom issue
- * Fix indexes on tickets to match new UI
- * Now that mailroom is setting ContactImport.status, use in reads
-
-v6.5.34
-----------
- * Update to latest components (fixes overzealous list refresh, non-breaking ticket summary, and display name when created_by is null)
-
-v6.5.33
-----------
- * Fix Add To Group bulk action on contact list page
- * Add status field to ContactImport and before starting batches, set redis key mailroom can use to track progress
- * Delete unused template and minor cleanup
-
-v6.5.32
-----------
- * Fix template indentation
- * Pass force=True when closing ticket as part of releasing a ticketer
- * Add beginnings of new nav and SPA based UI (hidden from users for now)
-
-v6.5.31
-----------
- * Show masked urns for contacts API on anon orgs
- * Rework notifications, don't use Log model
-
-v6.5.30
-----------
- * Fix deleting of imports and exports now that they have associated logs
-
-v6.5.29
-----------
- * Add basic (and unused for now) JSON endpoint for listing notifications
- * Reduce sentry trace sampling to 0.01
- * Override kir language name
- * Add change_topic as action to ticket bulk actions API endpoint
- * Add Log and Notification model
-
-v6.5.28
-----------
- * Add new ticket event type for topic changes
- * Migrations to assign default topic to all existing tickets
-
-v6.5.27
-----------
- * Add migration to give all existing orgs a default ticket topic
-
-v6.5.26
-----------
- * Move mailroom_db data to external JSON file
- * Run CI tests with latest mailroom
- * Add ticket topic model and initialize orgs with a default topic
-
-v6.5.25
-----------
- * Improve display of channels logs for calls
-
-v6.5.24
-----------
- * Add machine detection as config option to channels with call role
- * Tweak event_fires management command to show timesince for events in the past
-
-v6.5.23
-----------
- * Drop retry_count, make error_count non-null
- * Improve channel log templates so that we use consistent date formatting, show call error reasons, and show back button for calls
- * Tweak how we assert form errors and fix where they don't match exactly
- * Re-add QUEUED status for channel connections
-
-v6.5.22
-----------
- * Tweak index used for retrying IVR calls to only include statuses Q and E
- * Don't show ticket events like note added or assignment on contact read page
- * Include error reason in call_started events in contact history
- * Remove channel connection statuses that we don't use and add error_reason
-
-v6.5.21
-----------
- * Prevent saving of campaign events without start_mode
- * Improve handling of group lookups in contact list views
- * Add button to see channel error logs
-
-v6.5.20
-----------
- * Make ChannelConnection.error_count nullable so it can be removed
- * Cleanup ChannelConnection and add index for IVR retries
- * Fix error display on contact update modal
- * Update to zapier app directory, wide formax option and fixes
- * Enable filtering on the channel log to see only errors
-
-v6.5.19
-----------
- * Fix system group labels on contact read page
- * Use shared error messages for orgs being flagged or suspended
- * Update to latest smartmin (ignores _format=json on views that don't support it)
- * Add command to undo events from a flow start
- * Send modal should validate URNs - * Use s3 when appropriate to get session output - * Add basic user accounts API endpoint - -v6.5.18 ----------- - * Apply webhook ticket fix to successful webhook calls too - -v6.5.17 ----------- - * Tweak error message on flow start modal now field component is fixed - * Fix issue for ticket window growing with url length - * Update LUIS classifiers to work with latest API requirements - * Tweak migration to populate contact.ticket_count so that it can be run manually - * Switch from django.contrib.postgres.fields.JSONField to django.db.models.JSONField - * Introduce s3 utility functions, use for reading s3 sessions in contact history - -v6.5.16 ----------- - * Update to Django 3.2 - * Migration to populate contact.ticket_count - -v6.5.15 ----------- - * Add warning to flow start modal that options have changed - * Fix importing of dynamic groups when field doesn't exist - -v6.5.14 ----------- - * Update to latest cryptography 3.x - * Add deep linking for tickets - * Update db trigger on ticket table to maintain contact.ticket_count - -v6.5.13 ----------- - * Tweak previous data migration to work with migrate_manual - -v6.5.12 ----------- - * Migration to zeroize contact.ticket_count and make it non-null - -v6.5.11 ----------- - * Allow deletion of fields used by campaign events - * Add last_activity_on to ticket folder endpoints - * Add API endpoint for ticket bulk actions - * Add nullable Contact.ticket_count field - -v6.5.10 ----------- - * Remove textit-whatsapp channel type - * Show ticket counts on ticketing UI - * Update to latest components with fixes for scrollbar and modax reuse - * Use new generic dependency delete modal for contact fields - -v6.5.9 ----------- - * Add management command for listing scheduled event fires - * Add index for ticket count squashing task - * Add data migration to populate ticket counts - * Add constraint to Msg to disallow sent messages without sent_on and migration to fix existing messages like that - -v6.5.8 ----------- - * Fix celery task name - -v6.5.7 ----------- - * Fix flow start modal when starting flows is blocked - * Add more information to audit_es_group command - * Re-save Flow.has_issues on final flow inspection at end of import process - * Add squashable model for ticket counts - * Add usages modal for labels as well - * Update the WA API version for channel that had it set when added - * Break out ticket folders from status, add url state - -v6.5.6 ----------- - * Set sent_on if not already set when handling a mt_dlvd relayer cmd - * Display sent_on time rather than created_on time in Sent view - * Only sample 10% of requests to sentry - * Fix searching for scheduled broadcasts - * Update Dialog360 API usage - -v6.5.5 ----------- - * Fix export page to use new filter to get non-localized class name for ids - * Fix contact field update - * Add searchable to trigger groups - * Add option to not retry IVR calls - * Add usages modal for groups - * Tweak wording on flow start modal - -v6.5.4 ----------- - * Rework flow start modal to show options as exclusions which are unchecked by default - * Change sent messages view to be ordered by -sent_on - -v6.5.3 ----------- - * Add Last Seen On as column to contact exports - * Resuable template for dependency lists - -v6.5.2 ----------- - * Internal ticketer for all orgs - -v6.5.1 ----------- - * Cleanup Msg CRUDL tests - * Cleanup squashable models - * Apply translations in fr - * Replace trigger folders with type specific filtered list pages so that they 
can be sortable within types
-
-v6.4.7
-----------
- * Update flow editor to include lone-ticketer submit fix
- * Fix pagination on the webhook results page
-
-v6.4.6
-----------
- * Update flow editor to fix not being able to play audio attachments in simulator
-
-v6.4.4
-----------
- * Start background flows with include_active = true
- * Update flow editor with MediaPlayer fix
- * Fix poetry content-hash to remove install warning
- * Update translations from transifex
-
-v6.4.3
-----------
- * Improve contact field forms
- * Fix urn sorting on contact update
- * Improve wording on forms for contact groups, message labels and flow labels
- * Improve wording on campaign form
-
-v6.4.2
-----------
- * Fix attachment button when attachments don't have extensions
- * Add missing ticket events to contact history
- * Fix clicking attachments in msgs view sometimes navigating to contact page
- * Parameterized form widgets. Bigger, darker form bits.
- * Tweak trigger forms for clarity
- * Add command to rebuild messages and pull translations from transifex
-
-v6.4.1
-----------
- * Fix unassigning tickets
-
-v6.4.0
-----------
- * Update README
-
-v6.3.90
-----------
- * Fix alias editor to post JSON
-
-v6.3.89
-----------
- * Remove beta gating of internal ticketers
- * Control which users can have tickets assigned to them with a permission
- * Use mailroom endpoints for ticket assignment and notes
- * Add custom user recover password view
-
-v6.3.88
-----------
- * Fix to display email on manage orgs
- * Drop no longer used Broadcast.is_active field
-
-v6.3.87
-----------
- * Update indexes on ticket model
- * Tweak ticketer default names
- * Add empty ticket list treatment
- * Fix API docs for messages endpoint to mention attachments rather than the deprecated media field
- * Update to editor with hidden internal ticketers
- * Consistent setting of modified_by when releasing/archiving/restoring
- * Remove old ticket views
- * Change ticketer sections on org home page to have Remove button and not link to old ticket views
- * Add assignee to ticketing endpoints, some new filters and new assignment view
-
-v6.3.86
-----------
- * Stop writing Broadcast.is_active as default value
- * Fix keyword triggers being imported without a valid match_type
-
-v6.3.85
-----------
- * Use the current user as the manual trigger user during simulation
- * Better trigger exports and imports
- * Make broadcast.is_active nullable and stop filtering by it in the API
-
-v6.3.84
-----------
- * Ignore scheduled triggers in imports because they don't import properly
- * Fix redirect after choosing an org for users that can't access the inbox
- * Optionally filter ticket events by ticket in contact history view
-
-v6.3.83
-----------
- * Fix default content type for pjax requests
- * Tweak queuing of flow starts to include created_by_id
-
-v6.3.82
-----------
- * Revert recent formax changes
-
-v6.3.81
-----------
- * Add Broadcast.ticket and expose as field (undocumented for now) on broadcast write API endpoint
- * Refactor scheduling to use shared form
- * Add exclusion groups to scheduled triggers
-
-v6.3.80
-----------
- * Update components so omnibox behaves like a field
- * Drop Language model and Org.primary_language field
-
-v6.3.79
-----------
- * Order tickets by last_activity_on and update indexes to reflect that
- * Backfill ticketevent.contact and use that for fetching events in contact history
- * Fix creating scheduled triggers not being able to see week day options
- * Handle reopen events for tickets
- * Stop creating Language instances or setting Org.primary_language
-
-v6.3.78
-----------
- * Add Ticket.last_activity_on and TicketEvent.contact
- * Return tickets by modified_on in the API
- * Add ability to reverse results for runs/contacts API endpoints
-
-v6.3.77
-----------
- * Better validation of invalid tokens when claiming Zenvia channels
- * Fix languages formax to not allow empty primary language
-
-v6.3.76
-----------
- * Read org languages from org.flow_languages instead of Language instances
-
-v6.3.75
-----------
- * Fix closing and reopening of tickets from API
-
-v6.3.74
-----------
- * Add better labels and help text for groups on trigger forms
- * Load ticket events from database for contact histories
- * Fix rendering of closed ticket triggers on trigger list page
- * Fix rendering of ticket events as JSON
- * Fix for delete modals
-
-v6.3.73
-----------
- * Backfill ticket open and close events
- * Add support for closed ticket triggers
-
-v6.3.72
-----------
- * Add CSRF tokens to modaxes
-
-v6.3.70
-----------
- * Add CSRF token to modax form
- * Tweak padding for nav so we don't overlap alerts
- * Only require current password to change email or password
- * Fix icon colors on latest Chrome
- * Migration to backfill Org.flow_languages
-
-v6.3.69
-----------
- * Add Org.flow_languages and start populating in Org.set_languages
- * Raise the logo so it can be clicked
-
-v6.3.68
-----------
- * Enable exclusion groups on triggers and make groups an option for all trigger types
- * Add users to mailroom test db
- * Add ticket note support to UI
-
-v6.3.67
-----------
- * Pass user id to ticket/close ticket/reopen endpoints to use in the TicketEvent mailroom creates
- * Model changes for ticket assignment
- * Make flow session output URL have a max length of 2048
-
-v6.3.66
-----------
- * Add new ticket event model
- * Add output_url field to FlowSession
-
-v6.3.65
-----------
- * Fix rendering of recipient buttons on outbox
- * Rework trigger create forms to make conflict handling more consistent
- * Iterate through all pages when syncing WhatsApp templates
-
-v6.3.64
-----------
- * URL field on HTTPRequestLog should have max length of 2048
-
-v6.3.63
-----------
- * Drop unused index on contact name, and add new org+modified_on index
-
-v6.3.62
-----------
- * Update components to single mailroom resource for completion
-
-v6.3.60
-----------
- * Only retry 5000 messages at a time, prefetch channel and fields
-
-v6.3.59
-----------
- * Enable model instances to show an icon in selects
-
-v6.3.58
-----------
- * Add model changes for closed ticket triggers
- * Add model changes for exclude groups support on triggers
-
-v6.3.57
-----------
- * Tweak mailroom_db to make contact created_on values fixed
- * Add trigger type folder list views
- * Fix filtering of flows for new conversation triggers
- * Fix ordering of channel fields on triggers
- * Tweak inspect_flows command to handle unreadable flows
- * Nest group buttons on campaign list so they don't grow to largest cell
-
-v6.3.56
-----------
- * Fix migrating flows whose definitions contain decimal values
- * Update to tailwind 2, fix security warnings
- * Simplify org filtering on CRUDLs
- * Remove IS_PROD setting
-
-v6.3.55
-----------
- * Update layout and color for badge buttons
- * Add management command to inspect flows and fix has_issues where needed
- * Fix deleting flow labels with parents
- * Fix broken org delete modal
- * Add user arg to Org.release and User.release
-
-v6.3.54
-----------
- * Optimize message retries with a perfect index
- * Convert channels to soft dependencies
-
-v6.3.53
-----------
- * Update to latest temba-components
-
-v6.3.52
-----------
- * Update to latest floweditor
- * Adjust WA templates page title
- * Fix Dialog360 WA templates sync
-
-v6.3.51
-----------
- * Adjust WA templates page styles
- * Migration to clear next_attempt for android channels
-
-v6.3.50
-----------
- * Resend messages using web endpoint rather than task
- * Convert message labels, globals and classifiers to use soft dependencies
-
-v6.3.49
-----------
- * Make Msg.next_attempt nullable and add msgs to mailroom_db
- * Migration to ensure that inactive flows don't have any deps
- * Fix Flow.release to remove template deps
-
-v6.3.48
-----------
- * Calculate proper msg ids for commands from relayer that have integer overflow issue
- * Add reusable view for dependency deleting modals and switch to that and soft dependencies for ticketers
- * Don't do mailroom session interruption during org deletion
- * Fix org deletion when broadcasts have parents and webhook results have contacts
- * Make sure templates and templates translations are deleted on org release
- * Set max fba pages limit to 200
-
-v6.3.47
-----------
- * Display warning icon in flow list for flows with issues
- * Make Flow.has_issues non-null and cleanup unused localized strings on Flow model
- * Support syncing Dialog360 WhatsApp templates
-
-v6.3.46
-----------
- * Fix channel log icons and disallow message resending for suspended orgs
- * Add migration to populate Flow.has_issues
-
-v6.3.45
-----------
- * Add migration to populate template namespace
- * Expose template translation namespace field on API
- * Don't save issues into flow metadata but just set new field has_issues instead
- * Queue mailroom task to do msg resends
-
-v6.3.44
-----------
- * Tweak import preview page so when adding to a group isn't enabled, the group controls are disabled
- * Update flow editor and temba-components
-
-v6.3.40
-----------
- * Add namespace field to template translations
- * Fetching and saving revisions should return flow issues as separate field
-
-v6.3.39
-----------
- * Rework task for org deletion
-
-v6.3.38
-----------
- * Move tickets endpoint to tickets crudl
- * Refactor WhatsApp templates
- * Add task for releasing of orgs
-
-v6.3.37
-----------
- * Fix contact imports always creating new groups
- * Migration to fix escaped nulls in flow revision definitions
- * Rework beta gated agent views to be ticket centric
-
-v6.3.35
-----------
- * Clear primary language when releasing org
- * Strip out NULL characters when serializing JsonAsTextField values
- * Override language names and ensure overridden names are used for searching and sorting
-
-v6.3.33
-----------
- * Update components and flow editor to common versions
- * Allow external ticketers to use agent ui, add footer to tickets
-
-v6.3.32
-----------
- * Release import batches when releasing contact imports
-
-v6.3.31
-----------
- * Fix serializing JSON to send to mailroom when it includes decimals
-
-v6.3.30
-----------
- * Restrict org languages to ISO-639-1 plus explicit inclusions
-
-v6.3.29
-----------
- * Move Twilio, Plivo and Vonage number searching views into their respective channel packages
- * Optimize query for fetching contacts with only closed tickets
- * Release contact imports when releasing groups
- * Properly skip anonymous user for analytics
-
-v6.3.28
-----------
- * Remove simplejson
- * Update to latest vonage client and fix retries
-
-v6.3.27 ----------- - * Restore menu-2 icon used by org choose menu - -v6.3.26 ----------- - * Make groups searchable on contact update page - -v6.3.25 ----------- - * Add beta-gated tickets view - -v6.3.24 ----------- - * Change analytics.track to expect a user argument - * Add org released_on, use when doing full releases - * Ignore anon user in analytics - -v6.3.23 ----------- - * Clean up countries code used by various channel types - -v6.3.22 ----------- - * Show results in flow order - -v6.3.21 ----------- - * Fix Javascript error on two factor formax - * Beta-gate chatbase integration for now - -v6.3.20 ----------- - * Rework DT One and Chatbase into a new integrations framework - * Expose Org.language as default language for new users on org edit form - -v6.3.19 ----------- - * Add support for Zenvia SMS - * Cleanup parsing unused code on org model - * Fix flow update forms to show correct fields based on flow type - * Tweak JSONAsTextField to allow underlying DB column to be migrated to JSONB - * Add controls to import preview page for selecting existing groups etc - -v6.3.18 ----------- - * Fix template names - -v6.3.17 ----------- - * Fix font reference in scss - -v6.3.16 ----------- - * Add group name field to contact imports so that it can be customized - * Rename Nexmo to Vonage, update icon - * Merge the two used icomoon sets into one and delete unused one - * Cleanup problems in org view templates - -v6.3.15 ----------- - * Revert wording changes when orgs don't have email settings to clarify that we do send - * Fix wording of Results link in editor - -v6.3.14 ----------- - * Fix locale files - * Fix SMTP server settings views to explain that we don't send emails if you don't have a config - * Add API endpoint to fetch tickets filterable by contact - -v6.3.13 ----------- - * Clarify terms for exports vs downloads - * Fix rendering of airtime events in contact history - * Add flows import and flow exports links in the flows tab - -v6.3.12 ----------- - * Update to latest flow-editor - * Cleanup unused dates methods - * Update markdown dependency - * Expose exclude_active on flow start read API - * Support 3 digits short code on Jasmin channel type - * Add support for YYYY-MM-DD date format - * Update DT One support to collect api key and secret to use with new API - * Update parent remaining credits - * Release broadcasts properly - -v6.3.11 ----------- - * Fix redirect after submitting Start In Flow modal - -v6.3.10 ----------- - * Add support to exclude active contacts in other flows when starting a flow on API - * Remove unsupported channel field on broadcast create API endpoint - * Add Start Flow modal to contact read page - * Fix lock file being out of sync with pyproject - -v6.3.9 ----------- - * Revert update to use latest API version to get WA templates - * Fix setting Zenvia webhooks - * Update Django and Django REST Framework - -v6.3.8 ----------- - * Convert to poetry - -v6.3.6 ----------- - * Update pt_BR translation - * Update to use latest API version to get WA templates - * Display failed on flow results charts, more translations - * Zenvia WhatsApp - -v6.3.5 ----------- - * Fix broken flow results charts - -v6.3.4 ----------- - * Update to latest celery 4.x - -v6.3.2 ----------- - * Support reseting the org limits to the default settings by clearing the form field - * Update redis client to latest v3.5.3 - * Fix manage accounts form blowing up when new user has been created in background - -v6.3.1 ----------- - * Add support for runs with exit_type=F - * 
Support customization for org limits
-
-v6.3.0
-----------
- * Update stable versions and coverage badge link
- * Style Outbox broadcasts with megaphone icons and use includes for other places we render contacts and groups
- * Fix spacing on outbox view
- * Add Discord channel type
-
-v6.2.4
-----------
- * Update Portuguese translation
- * Update to floweditor v1.13.5
-
-v6.2.3
-----------
- * Update to latest floweditor v1.13.4
-
-v6.2.2
-----------
- * Update to flow editor v1.13.3
- * Update Spanish translation
- * Disable old Zenvia channel type
- * Fix styles on fields list
-
-v6.2.1
-----------
- * Return registration details to Android if they have the same UUID
- * Add spacing between individual channel log events
- * Fix external channel claim form
- * Do not track Android channel creation by anon user
-
-v6.2.0
-----------
- * Update translations for es, fr and pt-BR
- * Fix rendering of pending broadcasts in outbox view
-
-v6.1.48
-----------
- * Update editor with dial router changes
- * Fix resthook formax validation
-
-v6.1.47
-----------
- * Change synched to synced
- * Update to smartmin 2.3.5
- * Require recent authentication to view backup tokens
-
-v6.1.46
-----------
- * Update to smartmin 2.3.5
- * Fix handling of attempts to sync old unclaimed channels
- * Add view to list all possible channel types
- * Fix rendering of nameless channels
-
-v6.1.45
-----------
- * Open up 2FA to all users
- * Do not allow duplicate invites
- * Never respond with registration commands in sync handler
-
-v6.1.44
-----------
- * Enforce time limit between login and two factor verification
- * Prevent inviting existing users
- * Add disabled textinputs and better expression selection on selects
- * Create failed login records when users enter incorrect backup tokens too many times
- * Logout user to force login to accept invite and require invite email account exactly
-
-v6.1.43
-----------
- * Backup tokens can only be used once
- * Add new 2FA management views
-
-v6.1.42
-----------
- * Use Twilio API to determine capabilities of new Twilio channels
- * Fix result pages not loading for users using Spanish interface
-
-v6.1.41
-----------
- * Remove no longer used permissions
- * Override login view to redirect to new views for two-factor authentication
- * Reduce recent export window to 4 hours
- * Change message campaign events to use background flows
-
-v6.1.40
-----------
- * Remove UserSettings.tel and add UserSettings.last_auth_on
-
-v6.1.39
-----------
- * Increase max len of URN fields on airtime transfers
- * Add toggle to display manual flow starts only
- * Cleanup 2FA models
-
-v6.1.38
-----------
- * Update flow editor to 1.12.10 with failsafe errors
- * Make validation of external channel URLs disallow private and link local hosts
- * Cleanup middleware used to set org, timezone and language
-
-v6.1.37
-----------
- * Update components and editor to latest versions
- * Switch to microsecond accuracy timestamps
- * Switch to default_storage for export assets
-
-v6.1.33
-----------
- * Tweaks to how we generate contact histories
-
-v6.1.32
-----------
- * Mute invalid host errors
- * Add migration to alter m2ms to use bigints
- * Drop no longer used database function
- * Switch to big id for msgs and channel logs
-
-v6.1.31
-----------
- * Add management command to check sentry
- * Remove unused context processor and unused code from org_perms
-
-v6.1.29
-----------
- * Rework contact history so that rendering as events happens in view and we also expose a JSON version
-
-v6.1.26
-----------
- * Upgrade urllib3
-
-v6.1.25
-----------
- * Update to Elasticsearch v7
-
-v6.1.24
-----------
- * Broadcast events in history should be white like message events
-
-v6.1.23
-----------
- * Add index on flow start by start type
- * Allow only deleting msg folders without active child labels
- * Use engine events (with some extra properties) for msgs in contact history
-
-v6.1.22
-----------
- * Fix API serialization of background flow type
- * Allow background flows to be used in scheduled triggers
- * Update pip-tools
-
-v6.1.21
-----------
- * Configure editor and components to use completions files in current language
-
-v6.1.20
-----------
- * Update to latest floweditor and temba-components
-
-v6.1.19
-----------
- * Update to floweditor v1.12.6
- * Fix deleting classifiers
-
-v6.1.18
-----------
- * Add support for background flows
-
-v6.1.17
-----------
- * Update to flow editor v1.12.5
- * Fix importing dependencies when it's a clone in the same workspace
- * Allow aliases to be reused on boundaries with different parent
- * Increase max length on external channels to be configurable up to 6400 chars
- * Fix contact export warning for existing export
-
-v6.1.16
-----------
- * Update to latest flow editor 1.12.3
- * Allow staff users to use the org chooser
-
-v6.1.15
-----------
- * Add constraint to check URN identity matches scheme and path
- * Add non-empty constraint for URN scheme and path
- * Fix contact list pagination with searches
- * Show query on list page for smart groups
-
-v6.1.14
-----------
- * Change template translations to be TEXT
- * Set global email timeout, fixes rapidpro #1345
- * Update tel parsing to match gocommon, fixing how we currently accept local US numbers
-
-v6.1.13
-----------
- * Bump temba-components to v0.8.11
-
-v6.1.12
-----------
- * Un-beta-gate Rocket.Chat channels
-
-v6.1.10
-----------
- * Login summary on org home page should include agents
- * Rework manage accounts UI to include agents
-
-v6.1.9
-----------
- * Fix deleted flow dependency preventing global deletion
- * Cache lookups of auth.Group instances
-
-v6.1.8
-----------
- * For field columns in imports, only match against user fields
- * Add agent role and cleanup code around org roles
-
-v6.1.7
-----------
- * Wire table listeners on pjax reload
- * Update domain from swag.textit.com to whatsapp.textit.com
- * Add internal ticketer type for BETA users
- * Inner scrolling on contact list page
- * Improve styles for recipient lists
-
-v6.1.6
-----------
- * Trim our start runs 1,000 at a time and by id
- * Increase global max value length to 10000 and fix UI to be more consistent with fields
-
-v6.1.5
-----------
- * Share modals on globals list, truncate values
- * Squash migrations
-
-v6.1.4
-----------
- * Add security settings file
- * Fix intent selection on split by intent
- * Add empty migrations for squashing in next release
-
-v6.1.3
-----------
- * Fix intent selection on split by intent
- * Update callback URL for textit whatsapp
- * Use Django password validators
-
-v6.1.2
-----------
- * Add TextIt WhatsApp channel type
-
-v6.1.1
-----------
- * Fix contact exports when orgs have orphaned URNs in schemes they don't currently use
-
-v6.1.0
-----------
- * Hide editor language dialog blurb until needed to prevent flashing
- * Fix broken flows list page if org has no flows
- * Allow underscores in global names
- * Improve calculating of URN columns for exports so tests don't break every time we add new URN schemes
- * Make instruction lists on channel claim pages more consistent
-
-v6.0.8
-----------
- * Editor fix for split by intents
- * Add empty migrations for squashing in next release
-
-v6.0.7
-----------
- * Fix choose org page
- * Fix recipient search
- * Fix run deletion
-
-v6.0.6
-----------
- * Fix for textarea init
-
-v6.0.5
-----------
- * Adjust contact icon color in recipient lists
-
-v6.0.4
-----------
- * Fix recipients contacts and urns UI labels
- * Fix flow starts log page pagination
- * Update temba-components and flow-editor to common versions
- * Fix flow label delete modal
- * Fix global delete modal
-
-v6.0.3
-----------
- * Update to components v0.8.6, bugfix release
- * Handle CSV imports in encodings other than UTF8
-
-v6.0.2
-----------
- * Fix broken ticket re-open button
- * Missing updated Fr MO file from previous merge
- * Apply translations in fr
-
-v6.0.1
-----------
- * Fix orgs being suspended due to invalid topup cache
- * Set uses_topups on new orgs based on whether our plan is the TOPUP_PLAN
- * Fix validation issues on trigger update form
- * Fix hover cursor in lists for viewers
- * Action button alignment on archived messages
- * Fix flow table header for viewers
- * Fix tests for channel deletion
- * Fix redirects for channel and ticketer deletion
- * Fix dialog when deleting channels with dependencies
- * Match headers and contact fields with labels as well as keys during contact imports
-
-v6.0.0
-----------
- * Add Rocket.Chat ticketer to test database
-
-v5.7.91
-----------
- * Add Rocket.Chat ticketers
-
-v5.7.90
-----------
- * Update rocket.chat icon in correct font
-
-v5.7.89
-----------
- * Improve Rocket.Chat claim page
- * Add Rocket.Chat icon
-
-v5.7.87
-----------
- * Cleanup Rocket.Chat UI
-
-v5.7.86
-----------
- * Add RocketChat channels (beta-only for now)
-
-v5.7.85
-----------
- * Add back jquery-migrate and remove debug
-
-v5.7.84
-----------
- * Remove select2, coffeescript, jquery plugins
-
-v5.7.83
-----------
- * Fix broken import link on empty contacts page
- * Use consistent approach for limits on org
- * Globals UI should limit creation of globals to org limit
- * Fix archives list styles and add tabs for message and run archives
- * Restyle the Facebook app channel claim pages
- * Switch to use FBA type by default
-
-v5.7.82
-----------
- * Don't blow up if import contains invalid URNs but pass values on to mailroom
- * Update to version of editor with some small styling tweaks
- * Include occurred_on with mo_miss events queued to mailroom
- * Adjust Twilio connect to redirect properly to the original claim page
- * Remove no longer used FlowRun.timeout_on and drop two unused indexes
- * Cleanup more localized strings with trimmed
- * Fix 404 error in channel list
-
-v5.7.81
-----------
- * Add page title to brand so that it's configurable
- * Don't send alert emails for orgs that aren't using topups
- * Consider timezone when inferring org default country and display on import create page
- * Add page titles to fields and flows
- * Allow changing EX channels role on UI
-
-v5.7.80
-----------
- * Add contact last seen on to list contacts views
- * Cleanup channel model fields
- * Add charcount to send message dialog
- * Show channel logs link for receive only channels
- * Fix export flow page styles
- * Allow searching for countries on channel claim views
-
-v5.7.79
-----------
- * Rework imports to allow importing multiple URNs of same scheme
- * Cleanup no longer used URN related functionality
- * Show contact last seen on on contact read page
-
-v5.7.78
----------- - * Clean up models fields in contacts app - -v5.7.77 ----------- - * Fix styling on the API explorer page - * Fix list page selection for viewers - * Move contact field type constants to ContactField class - * Allow brand to be set by env variable - -v5.7.76 ----------- - * Drop support for migrating legacy expressions on API endpoints - * Fix imports blowing up when header is numerical - * Fix 11.4 flow migration when given broken send action - * Drop RuleSet and ActionSet models - -v5.7.75 ----------- - * Last tweaks before RuleSet and ActionSet can be dropped - * Contact id treatment for details - * Update components to ship ajax header and use it in language endpoint - * Remove no longer needed legacy editor completion - -v5.7.74 ----------- - * Remove legacy flow code - * WA channel tokens refresh catch errors for each channel independently - -v5.7.73 ----------- - * Make flows searchable and clickable on triggers - * Make flows searchable on edit campaign event - -v5.7.72 ----------- - * Fix editor whatsapp templates, refresh whatsapp channel pages - * Move omnibox module into temba.contacts.search - -v5.7.71 ----------- - * Remove legacy contact searching - * Remove code for dynamic group reevaluation and campaign event scheduling - -v5.7.70 ----------- - * Fix pdf selection - -v5.7.69 ----------- - * Validate language codes passed to contact API endpoint - * Don't actually create a broadcast if sending to node but nobody is there - * Update to latest floweditor - -v5.7.67 ----------- - * Fix globals endpoint so name is required - * Filter by is_active when updating fields on API endpoint - -v5.7.66 ----------- - * Replace remaining Contact.get_or_create calls with mailroom's resolve endpoint - -v5.7.65 ----------- - * URN lookups onthe contact API endpoint should be normalized with org country - * Archiving a campaign should only recreate events - -v5.7.64 ----------- - * Don't create contacts and URNs for broadcasts but instead defer the raw URNs to mailroom - -v5.7.63 ----------- - * Validate that import files don't contain duplicate UUIDs or URNs - -v5.7.62 ----------- - * Update version of editor and components - * Upload imports to use UUID based path - * Fix issue where all keywords couldnt be removed from a flow - -v5.7.61 ----------- - * Remove old editor, redirect editor_next to editor - -v5.7.60 ----------- - * Fix contact imports from CSV files - * Tweaks to import UI - -v5.7.59 ----------- - * Imports 2.0 - -v5.7.55 ----------- - * Use v13 flow as example on definitions endpoint docs - * Add URNs field to FlowStart and pass to mailroom so that it creates contacts - -v5.7.54 ----------- - * Update editor to get support for expressions in add to group actions - * Remove unused localized text on Msg and Broadcast - -v5.7.52 ----------- - * Migrations and models for new imports - -v5.7.51 ----------- - * Add plan_start, calculate active contacts in plan period, add to OrgActivity - * Tweak how mailroom_db creates extra group contacts - * Update to latest django-hamlpy - -v5.7.50 ----------- - * Optimizations for orgs with many contact fields - -v5.7.49 ----------- - * Update plan_end when suspending topup orgs - * Suspend topup orgs that have no active credits - * Show suspension header when an org is suspended - * Tweak external channel config styling - * Fix styles for button on WA config page - -v5.7.48 ----------- - * Fix button style for channel extra links - * Skip components missing text for WA templates sync - * Editors should have API tokens - -v5.7.47 
-----------
- * Queue mailroom task to schedule campaign events outside of import transaction
- * Fix margin on fields warning alert
-
-v5.7.46
-----------
- * Use mailroom task for scheduling of campaign events
-
-v5.7.45
-----------
- * Make sure form._errors is a list
-
-v5.7.44
-----------
- * Add index to enforce uniqueness for event fires
-
-v5.7.43
-----------
- * Fix migration
-
-v5.7.42
-----------
- * Bump smartmin to 2.2.3
- * Fix attachment download and pdf links
-
-v5.7.41
-----------
- * Fix messages to send without topup, and migrations
- * No topup transfers on suborgs, show contacts, not credits
-
-v5.7.40
-----------
- * Invalid language codes passed to contact API endpoint should be ignored and logged for now
-
-v5.7.39
-----------
- * Update widget focus and borders on legacy editor
- * Show global form errors and pre-form on modax template
-
-v5.7.38
-----------
- * Add alpha sort and search to results view
- * Searchable contact fields and wired listeners after group changes
- * Force policy redirect on welcome page, honor follow-on navigation redirect
- * Use mailroom for contact creation in API and mailroom_db command
- * Adjust styling for contact import scenarios
- * Show address when it doesn't match channel name
-
-v5.7.37
-----------
- * Add topup button to topup manage page
-
-v5.7.36
-----------
- * Fix deleting ticketers
-
-v5.7.35
-----------
- * Zendesk file view needs to be csrf exempt
- * Use mailroom to create contacts from UI
-
-v5.7.34
-----------
- * Add view to handle file URL callbacks from Zendesk
-
-v5.7.33
-----------
- * Fix delete button on archived contacts page
- * Don't allow saving queries that aren't supported as smart groups
- * Delete no longer used contacts/fields.py
- * Fix contacts reappearing in ES searches after being modified by a bulk action
- * Adjust pjax block for contact import block
-
-v5.7.32
-----------
- * Modal max-height in vh to not obscure buttons
-
-v5.7.31
-----------
- * Add padding for p tags on policies
-
-v5.7.30
-----------
- * Add content guideline policy option, update styling a bit
-
-v5.7.29
-----------
- * Sitewide refresh of styles using Tailwind
- -v5.7.28 ----------- - * Update to flow editor v1.9.15 - -v5.7.27 ----------- - * Update to flow editor v1.9.14 - * Add support for last_seen_on in legacy search code - -v5.7.26 ----------- - * Handle large deletes of contacts in background task - -v5.7.25 ----------- - * Fix bulk actions against querysets from ES searches - * Fix bulk action permissions on contact views - -v5.7.24 ----------- - * Rename existing 'archive' contact action in API to 'archive_messages' - * Allow deleting of all contacts from Archived view - -v5.7.23 ----------- - * Rename All Contacts to Active - * Add UI for archiving, restoring and deleting contacts - -v5.7.22 ----------- - * Bump version of mailroom and indexer used for tests - * Drop no longer used is_blocked and is_stopped fields - -v5.7.21 ----------- - * Add missing migration from last rev - -v5.7.20 ----------- - * Add missing migration - -v5.7.19 ----------- - * Make contact.is_stopped and is_blocked nullable and stop writing - -v5.7.18 ----------- - * Update sys group trigger to handle archiving - -v5.7.17 ----------- - * Migration to add Archived sys group to all orgs - -v5.7.16 ----------- - * Update to flow editor 1.9.11 - * Update database triggers to use contact status instead of is_blocked or is_stopped - * Make contact.status non-null - * Create new archived system group for new orgs - -v5.7.15 ----------- - * Add nag warning to legacy editor - -v5.7.14 ----------- - * Migration to backfill contact status - -v5.7.13 ----------- - * Enable channelback files for Zendesk ticketers - * Set status as active for new contacts - * Add new status field to contact - * Fix legacy editor by putting html-tag block back - * Change the label for CM channel claim - -v5.7.12 ----------- - * Fix imports that match by UUID - * Fix Nexmo search numbers and claim number - * Use Django language code on html tag - * Add support for ClickMobile channel type - -v5.7.11 ----------- - * Fix creating of campaign events based on last_seen_on - * Tweak msg_console so it can include sent messages which are not replies - * Fix mailroom_db command - * Expose last_seen_on on contact API endpoint - -v5.7.10 ----------- - * Update floweditor to 1.9.10 - * Add Last Seen On as a system field so it can be used in campaigns - * Tweak search_archives command to allow JSONL output - -v5.7.9 ----------- - * Fix reading of S3 event streams - * Migration to populate contact.last_seen_on from msg archives - -v5.7.8 ----------- - * Add plan_end field to Orgs - -v5.7.7 ----------- - * Add search archives management command - -v5.7.6 ----------- - * Optimizations to migration to backfill last_seen_on - -v5.7.5 ----------- - * Add migration to populate contact.last_seen_on - * Update to latest temba-components with support for refresh work - -v5.7.4 ----------- - * Use new metadata field from mailroom searching endpoints - * Make sure we have only one active trigger when importing flows - * Fix org selector and header text alignment when editor is open - -v5.7.3 ----------- - * Add contact.last_seen_on - * Bump floweditor to v1.9.9 - -v5.7.2 ----------- - * Add error messages for all error codes from mailroom query parsing - * Fix org manage quick searches - * Always use mailroom for static group changes - -v5.7.1 ----------- - * Add session history field to flowstarts - * Have mailroom reset URNs after contact creation to ensure order is correct - -v5.7.0 ----------- - * Add start_type and created_by to queued flow starts - * New mixin for list views with bulk actions - * Update some 
dependencies to work with Python 3.8 and MacOS - -v5.6.5 ----------- - * Set the tps options for Twilio based on country and number type - * Fix wit.ai classifiers and double logging of errors on all classifier types - -v5.6.3 ----------- - * Add variables for nav colors - -v5.6.2 ----------- - * Fix failing to manage logins when the we are logged in the same org - -v5.6.1 ----------- - * instead of dates, keep track of seen runs when excluding archived runs from exports - -v5.6.0 ----------- - * 5.6.0 Release Candidate - -v5.5.78 ----------- - * Improve the visuals and guides on the FBA claim page - * Block flow starts and broadcasts for suspended orgs - * Add a way to suspend orgs from org manage page - -v5.5.77 ----------- - * Subscribe to the Facebook app for webhook events - -v5.5.76 ----------- - * Add Facebook App channel type - -v5.5.75 ----------- - * always update both language and country if different - -v5.5.74 ----------- - * allow augmentation of templates with new country - -v5.5.73 ----------- - * Add support for urn property in search queries - * Add support for uuid in search queries - * Set country on WhatsApp templates syncing and add more supported languages - * Add country on TemplateTranslation - -v5.5.72 ----------- - * Use modifiers for field value updates - -v5.5.71 ----------- - * Fix to allow all orgs to import flows - -v5.5.70 ----------- - * Use modifiers and mailroom to update contact URNs - -v5.5.69 ----------- - * Refresh contact after letting mailroom make changes - * Contact API endpoint can't call mailroom from within a transaction - -v5.5.68 ----------- - * Fix contact update view - * Allow multi-user / multi-org to be set on each org - * Fix additional urls import - -v5.5.66 ----------- - * Implement Contact.update_static_groups using modifiers - * Consistent use of account/login/workspace - -v5.5.64 ----------- - * Fix editor - -v5.5.63 ----------- - * Make new org fields non-null and remove no longer needed legacy method - -v5.5.62 ----------- - * Rename whitelisted to verified - * Add migration to populate new org fields - -v5.5.61 ----------- - * Add new boolean fields to org for suspended, flagged and uses_topups and remove no longer used plan stuff - -v5.5.60 ----------- - * Move webhook log button to flow list page - * Add confirmation dialog to handle flow language change - -v5.5.59 ----------- - * Update to floweditor v1.9.8 - -v5.5.58 ----------- - * Update to floweditor 1.9.7 - * Remove BETA gating for tickets - -v5.5.57 ----------- - * Restore logic for when dashboard and android nav icons should appear - * Add translations in ru and fr - -v5.5.56 ----------- - * Improvements to ticketer connect views - * Still need to allow word only OSM ids - -v5.5.55 ----------- - * Fix boundaries URL regex to accept more numbers - -v5.5.54 ----------- - * Add index for mailroom looking up tickets by ticketer and external ID - * Make it easier to differentiate open and closed tickets - * Update to temba-components 0.1.7 for chrome textinput fix - -v5.5.53 ----------- - * Add indexes on HTTP log views - * Simplify HTTP log views for different types whilst given each type its own permission - -v5.5.52 ----------- - * More ticket view tweaks - -v5.5.51 ----------- - * Tweak zendesk manifest view - -v5.5.50 ----------- - * Tweak zendesk mailroom URLs - -v5.5.49 ----------- - * Store brand name in mailgun ticketer config to use in emails from mailroom - -v5.5.48 ----------- - * Defer to mailroom for ticket closing and reopening - -v5.5.47 ----------- -* 
Beta-gated views for Mailgun and Zendesk ticketers - -v5.5.46 ----------- - * Bump black version - * Fix layering of menu with simulator - -v5.5.45 ----------- - * Increase the template name field to accept up to 512 characters - * Make sending of Stripe receipts optional - * Add OrgActivity model that tracks contacts, active contacts, incoming and outgoing messages - -v5.5.43 ----------- - * Fix JS escaping on channel log page - -v5.5.42 ----------- - * Remove csrf exemption for views that don't need it (all our pjax includes csrf) - * Escape translations in JS literals - * Upgrade FB graph API to 3.3 - -v5.5.41 ----------- - * Use branding keys when picking which orgs to show on manage - -v5.5.40 ----------- - * Allow branding to have aliases - * Fix bug of removing URNs when updating fields looking up by URN - -v5.5.39 ----------- - * Update to floweditor 1.9.6 - * New task to track daily msgs per user for analytics - * Add support for Russian as a UI language - * Models and editor API endpoint for tickets - * Skip duplicate relayer call events - -v5.5.38 ----------- - * Update to flow editor 1.9.5 - * Allow custom TS send URLs - -v5.5.37 ----------- - * Remove all uses of _blank frame name - * Strip exif data from images - -v5.5.36 ----------- - * Better tracking of channel creation and triggers, track simulation - * Do not use font checkboxes for contact import extra fields - -v5.5.35 ----------- - * Revert Segment.io identify change to stay consistent with other tools - -v5.5.34 ----------- - * Identify users in Segment.io using best practice of user id, not email - -v5.5.33 ----------- - * Add context processor to stuff analytics keys into request context - * Restrict 2FA functionality to BETA users - -v5.5.32 ----------- - * Add basic 2FA support - -v5.5.31 ----------- - * Update to latest smartmin - -v5.5.30 ----------- - * Add new flow start type to record that flow was started by a Zapier API call - * Contact bulk actions endpoint should error if passed no contacts - * Remove mentioning the countries for AT claim section - * Add Telesom channel type - -v5.5.29 ----------- - * Fix trimming flow starts with start counts - -v5.5.28 ----------- - * Update Africa's Talking supported countries - -v5.5.27 ----------- - * Remove temporary NOOP celery tasks - * Drop Contact.is_paused field - * Editor 1.9.4, better modal centering - -v5.5.26 ----------- - * Add NOOP versions of renamed celery tasks to avoid problems during deploy - -v5.5.23 ----------- - * Remove default value on Contact.is_paused so it can be dropped - * Trim completed mailroom created flow starts - * Update flow starts API endpoint to only show user created flow starts and add index - -v5.5.22 ----------- - * Add nullable contact.is_paused field - * Display run count on flow start list page - -v5.5.21 ----------- - * Optimze flow start list page with DB prefetching - * Indicate on flow start list page where start was created by an API call - -v5.5.20 ----------- - * Use actual PO library to check for msgid differences - * Migration to backfill FlowStart.start_type - * Log error of WA channel failing to sync templates - -v5.5.19 ----------- - * Add FlowStart.start_type - * Ensure flow starts created via the API are only sent to mailroom after the open transaction is committed - -v5.5.18 ----------- - * Add flow start log page - -v5.5.17 ----------- - * Add index to list manually created flow starts - * Make FlowStart.org and modified_on non-NULL - * Move contact modification for name and language to be done by mailroom - 
-v5.5.16
-----------
- * Bower no longer supported for package installs
- * Migration to backfill FlowStart.org and modified_on
-
-v5.5.15
-----------
- * Update to flow-editor 1.9.2, security patches
-
-v5.5.14
-----------
- * Ensure IVR retry is preserved on new revisions
- * Import flows for mailroom test db as v13
- * Make UUID generation fully mockable
- * Add run UUID on flow results exports
- * Drop unused fields on FlowStart and add org
-
-v5.5.13
-----------
- * Stop using FlowStart.modified_on so that it can be removed
- * Disable syncing templates with variables in headers and footers
-
-v5.5.12
-----------
- * Import and export of PO files
-
-v5.5.10
-----------
- * Bump up the simulator when popped so it fits on more screens
- * Editor performance improvements
-
-v5.5.8
-----------
- * Update help text on contact edit dialog
- * Add prometheus endpoint config on account page
- * Fix boundary aliases filtering by org
-
-v5.5.7
-----------
- * Fix open modal check on pjax refresh
- * Show warnings on contact field page when org is approaching or has hit the limit
-
-v5.5.6
-----------
- * Temporarily disable templates requests to FB when claiming WA channels
-
-v5.5.5
-----------
- * Newest smartmin with BoM fix
-
-v5.5.4
-----------
- * Show better summary of schedules on trigger list page
- * Fix display of trigger on contact group delete modal
-
-v5.5.3
-----------
- * Update to floweditor 1.8.9
- * Move EX constants to channel type package
- * Remove unused deps and address npm security warnings
- * Add 18 hours as flow expiration option
- * FlowCRUDL.Revisions should return validation errors from engine as detail field
- * Allow setting authentication header on External channels
- * Add normalize contact tels task
- * Drop full resolution geometry, only keep simplified
- * Add attachments columns to flow results messages sheet
-
-v5.5.0
-----------
- * Increase the WA channels tps to 45 by default
-
-v5.4.13
-----------
- * Fix URL related test errors
-
-v5.4.12
-----------
- * Don't allow localhost for URL fields
-
-v5.4.11
-----------
- * Make sure external channel URLs are external
-
-v5.4.10
-----------
- * Complete FR translations
- * Update to floweditor 1.8.8
-
-v5.4.9
-----------
- * Fix submitting API explorer requests where there is no editor for query part
- * Lock down redirects on exports
- * Add more detailed FreshChat instructions
-
-v5.4.8
-----------
- * Find and fix more cases of not filtering by org
-
-v5.4.7
-----------
- * Fix org filtering on updates to globals
- * Fix campaign event update view not filtering by event org
- * Fix error in API contact references when passed a JSON number
- * Replace Whatsapp by WhatsApp
-
-v5.4.6
-----------
- * Merge pull request #2718 from nyaruka/fe187
-
-v5.4.4
-----------
- * Fix various filtering issues
-
-v5.4.3
-----------
- * Update sample flow test
-
-v5.4.2
-----------
- * Remove use of webhook where not appropriate
-
-v5.4.1
-----------
- * Update sample flows to use @webhook instead of @legacy_extra
-
-v5.4.0
-----------
- * Add API endpoint to update Globals
- * Keep latest sync event for Android channels when trimming
-
-v5.3.64
-----------
- * Add support for Twilio WhatsApp channel type
-
-v5.3.63
-----------
- * Add pre_deploy command to check imports/exports
- * Fix link to android APK downloads on claim page
-
-v5.3.62
-----------
- * Temporarily disable resume imports task
-
-v5.3.61
-----------
- * Fix text of save as group dialog
- * Add support to restart export tasks that might have
been stopped by deploy - -v5.3.60 ----------- - * Update to latest mailroom - * Add urns to runs API endpoint - -v5.3.59 ----------- - * Update to latest mailroom which returns allow_as_group from query parsing - * Don't create missing contact fields on flow save - -v5.3.57 ----------- - * Update flow editor 1.7.16 - * Fix translations on external channel claim page - * Add tabs to toggle between full flow event history and summary of messages - * Increase the max height on the flow results export modal dialog - -v5.3.56 ----------- - * Add params to flow starts API - * Change name of org_id param in calls to flow/inspect - * Add quick replies variable to external channel claim page - -v5.3.55 ----------- - * Allow editing of allow_international on channel update forms - * Use consistent format for datetimes like created_on on contact list page - -v5.3.54 ----------- - * Hide loader on start flow dialog when there are no channels - -v5.3.53 ----------- - * Fix creation of Android channels - -v5.3.52 ----------- - * Convert Android to dynamic channel type - -v5.3.51 ----------- - * Update to floweditor 1.7.15 - * Add python script to do all CI required formatting and locale rebuilding - * Use mailroom for query parsing for contact exports - * Fix text positioning on list pages - * Fix delete contact group modal buttons when blocked by dependencies - * Completion with upper case functions - -v5.3.50 ----------- - * Migration to set allow_international=true in configs of existing tel channels - * Remove no longer used flow definition caching stuff - -v5.3.49 ----------- - * Use realistic phone numbers in mailroom test db - * Remove contact filtering from flow results page - * Add migration to populate Flow.template_dependencies - -v5.3.48 ----------- - * Use mailroom searching for omnibox results - -v5.3.47 ----------- - * Add template_dependencies m2m - -v5.3.46 ----------- - * Do not subject API requests made with sessions to rate limiting (see the sketch below) - * Migration to convert flow dependencies metadata to new format - * Update description on the flow results export to be clear - -v5.3.45 ----------- - * Fix deletion of orgs and locations so that aliases are properly deleted - * Remove syntax highlighting in API explorer as it can't handle big responses - * Use new dependencies format from mailroom - -v5.3.44 ----------- - * Dynamic group creation / reevaluation through Mailroom - -v5.3.43 ----------- - * Update to latest mailroom - -v5.3.42 ----------- - * Fix actions on blocked contact list page - -v5.3.41 ----------- - * Disable simulation for archived flows - * Fix query explosion on Android channel alerts - -v5.3.40 ----------- - * Add subflow parameters to editor - -v5.3.39 ----------- - * Rework migration code so new flows are migrated too - -v5.3.38 ----------- - * Use mailroom for contact searches, contact list pages and flow starts via search - -v5.3.35 ----------- - * Rebuild components - -v5.3.34 ----------- - * Update to flow editor 1.7.13 - * Don't include 'version' in current definitions - * Migrate imports of flows to new spec by default - -v5.3.30 ----------- - * Exclude inactive template translations from API endpoint - -v5.3.29 ----------- - * Fix edge case for default alias dialog - * Add sending back to contact list page - * Save parent result refs in flow metadata - * Change name BotHub to Bothub - -v5.3.28 ----------- - * remove auto-now on modified_on on FlowRun - -v5.3.27 ----------- - * Update to floweditor 1.7.9 - * Warn users if starting a flow for Facebook without a topic - 
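The v5.3.46 item above leans on a detail of DRF authentication: session-authenticated requests carry no token, so `request.auth` is `None`. A minimal sketch of one way to exempt such requests from throttling; the class name is hypothetical and this is not necessarily how the project implements it:

```python
# Minimal sketch, assuming Django REST Framework; TokenOnlyThrottle is a
# hypothetical name, not the project's actual throttle class.
from rest_framework.throttling import ScopedRateThrottle


class TokenOnlyThrottle(ScopedRateThrottle):
    def allow_request(self, request, view):
        # Under SessionAuthentication request.auth is None, so requests from
        # the logged-in web UI skip the rate check entirely.
        if request.user and request.user.is_authenticated and request.auth is None:
            return True
        return super().allow_request(request, view)
```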
-v5.3.26 ----------- - * Allow arbitrary numbers when sending messages - * Componentized message sending - -v5.3.25 ----------- - * Show empty message list if we have archived them all - * Update to flow editor 1.7.8 - * Replace flow/validate call to mailroom with flow/inspect - * Add facebook topic selection - -v5.3.24 ----------- - * Pass version to mailroom migrate endpoint - * Fix saving on alias editor - * Support the whatsapp templates HEADER and FOOTER components - * Write HTTP log for errors in connection - -v5.3.23 ----------- - * Add support for whatsapp templates with headers and footers - * Make sure we have one posterizer form and we bind one click event handler for posterize links - -v5.3.22 ----------- - * Convert add/edit campaign event to components - -v5.3.21 ----------- - * Add UI for managing globals - -v5.3.16 ----------- - * Update to flow editor v1.7.7 - -v5.3.13 ----------- - * Update to floweditor v1.7.5 - * Re-add msg_console management command with new support for mailroom - * Cleanup some usages of trans/blocktrans - -v5.3.12 ----------- - * Add error and failure events to contact history - * Use form components on campaign create/update - -v5.3.11 ----------- - * Migrate sample flows to new editor - * Localize URNs in API using org country - * Write HTTPLogs for Whatsapp template syncing - * Remove Broadcast recipient_count field - -v5.3.10 ----------- - * Add read API endpoint for globals - -v5.3.9 ----------- - * Add trimming task for flow revisions - * Add models for globals support - * Add FreshChat channel support - -v5.3.8 ----------- - * Make sure imported flows are unarchived - * Validate we do not have a caller on a channel before adding a new one - -v5.3.7 ----------- - * Release URNs on Org release - -v5.3.6 ----------- - * Release Channel sync events and alarms - -v5.3.5 ----------- - * release Campaigns when releasing Orgs - -v5.3.4 ----------- - * Release flow starts when releasing flows - -v5.3.3 ----------- - * Add releasing to Classifiers and HTTPLogs - -v5.3.2 ----------- - * Allow manual syncing of classifiers - -v5.3.1 ----------- - * Update documentation for FB webhook events to subscribe to - -v5.3.0 ----------- - * Fix DT One branding and add new icon - * Fix validation problem on update schedule trigger form - * Use brand when granting orgs, not host - * Update contactsql parser to support same quotes escaping as goflow - -v5.2.6 ----------- - * Change slug for Bothub classifier to 'bothub' - -v5.2.5 ----------- - * Fix various Schedule trigger UI validation errors - * Fix intermittently failing Excel export tests - * Add noop reverse in migration - -v5.2.1 ----------- - * Fix order of Schedule migrations (thanks @matmsa27) - -v5.2.0 ----------- - * Show date for broadcast schedules - * Honor initial datetime on trigger schedule ui - -v5.1.64 ----------- - * Update to flow editor version 1.7.3 - * Fix weekly buttons resetting on trigger schedule form validation - * Validate schedule details on schedule trigger form - * Show query editors in contact search - * Add migration to fix schedules with None/NaN repeat_days_of_week values - * Move IE9 shim into the main template header - * Update README with final 5.0 versions - -v5.1.63 ----------- - * Update to flow editor v1.7.2 - -v5.1.62 ----------- - * Validate repeat_days_of_week when updating schedules - * Include airtime transfers in contact history - -v5.1.61 ----------- - * Tweak styling on contact field list page - * Send test email when the SMTP server config is set - -v5.1.60 
----------- - * Add Bothub classifier type - -v5.1.59 ----------- - * Update flow editor to version 1.7.0 - * Add Split by Intent action in flows - * Update Send Airtime action for use with DTOne - -v5.1.58 ----------- - * Unify max contact fields - * Don't allow deletion of flow labels with children - * Rename TransferTo to DTOne - -v5.1.57 ----------- - * Check pg_dump version when creating dumps - * Add missing block super in extra script blocks - * Fix omnibox not actually being required on send message form - * Rework airtime transfers to have separate http logs - * Allow flow starts by query - -v5.1.55 ----------- - * Sync intents on classifier creation - * Trim HTTP logs older than 3 days - -v5.1.54 ----------- - * remove fragile AT links to configuration pages - * Exclude hidden results from flow results page - * Exclude results with names starting with _ from exports - -v5.1.53 ----------- - * Classifier models and views - * HTTPLog models and views - -v5.1.52 ----------- - * add prefetch to retry - -v5.1.51 ----------- - * Add ThinQ Channel Type - -v5.1.50 ----------- - * Fix contact history rendering of broadcast messages with null recipient count - * Fix for start_session action in the editor - -v5.1.49 ----------- - * Fire schedules in Mailroom instead of celery - -v5.1.48 ----------- - * Rework contact history to include engine events - -v5.1.47 ----------- - * Update to flow editor 1.6.20 - -v5.1.46 ----------- - * Rev Flow Editor v1.6.19 - -v5.1.45 ----------- - * Fix rendering of campaigns on export page - * Fix ivr channel logs - * Make FlowRun.status non-NULL - * Make FlowSession.uuid unique and indexed - -v5.1.44 ----------- - * Tidy up fields on flow activity models - -v5.1.43 ----------- - * Fix styling on create flow dialog - * Make user fields nullable on broadcasts - * Populate repeat_minute_of_hour in data migration - -v5.1.42 ----------- - * Update trigger update views to take into account new schedule fields - -v5.1.41 ----------- - * Update docs on flow start extra to be accessible via @trigger - * Change input selector to work cross-browser on send modal - * Don't inner scroll for modax fetches - -v5.1.40 ----------- - * Fix issues with web components in Microsoft Edge - -v5.1.37 ----------- - * Cleanup Schedule class - * Drop unused columns on FlowRun - * Remove legacy engine code - * Remove legacy broadcast and message sending code - -v5.1.36 ----------- - * Temporarily disable compression for components JS - -v5.1.33 ----------- - * Use new expressions for campaign message events, broadcasts and join group triggers - * List contact fields with new expression syntax and fix how campaign dependencies are rendered - -v5.1.28 ----------- - * Use mailroom to interrupt runs when archiving or releasing a flow - * Re-organize legacy engine code - * Initial library of web components - -v5.1.27 ----------- - * Update to floweditor 1.6.13 - * Allow viewers to do GETs on some API endpoints - -v5.1.26 ----------- - * Fix rendering of campaign and event names in UI - * Move remaining channel client functionality into channel type packages - * Remove unused asset server stuff - -v5.1.25 ----------- - * Update floweditor to 1.6.12 - * Allow viewing of channel logs in anonymous orgs with URN values redacted - -v5.1.24 ----------- - * Cleanup campaign model fields - -v5.1.23 ----------- - * Really fix copying of flows with nameless has_group tests and add a test this time - -v5.1.22 ----------- - * Remove trigger firing functionality (except schedule triggers) and drop 
unused fields on trigger - -v5.1.21 ----------- - * Migration to backfill FlowRun.status - -v5.1.20 ----------- - * Limit group fetching to active groups - * Get rid of caching on org object as that's no longer needed - * Fix importing/copying flows when flow has group dependency with no name - -v5.1.19 ----------- - * Migration to add FlowRun.status - -v5.1.18 ----------- - * Cleanup fields on FlowRun (single migration with no real SQL changes which can be faked) - -v5.1.17 ----------- - * Remove all IVR flow running functionality which is now handled by mailroom - -v5.1.15 ----------- - * Update to flow editor v1.6.11 - * Releasing Nexmo channel shouldn't blow up if application can't be deleted on Nexmo side - -v5.1.14 ----------- - * Fix Nexmo IVR to work with mailroom - * Add migration to populate session UUIDs - * Update to Django 2.2 - * Send topup expiration emails to all org administrators - -v5.1.12 ----------- - * Drop ActionLog model - * Switch to new editor as the default, use v1.6.10 - * Add query field to FlowStart - -v5.1.11 ----------- - * Add FlowSession.uuid which is nullable for now - * Update to floweditor 1.6.9, scrolling rules - -v5.1.10 ----------- - * Update to flow editor 1.6.8, add completion config - * Add FlowStart.parent_summary, start deprecating fields - * Switch to bionic beaver for CI builds - * Add trigger params access to ivr flow - * Drop no longer used Broadcast.purged field - -v5.1.9 ----------- - * Make Broadcast.purged nullable in preparation for dropping it - -v5.1.8 ----------- - * Update floweditor to 1.6.7 and npm audit - -v5.1.7 ----------- - * Remove unused IVR tasks - * Simplify failed IVR call handling - -v5.1.6 ----------- - * Fix format_number to be able to handle decimals with more digits than current context precision - -v5.1.5 ----------- - * Update to flow editor 1.6.6 - -v5.1.4 ----------- - * Update to flow editor 1.6.5 - * Update Django to 2.1.10 - -v5.1.3 ----------- - * Update flow editor to 1.6.3 - -v5.1.2 ----------- - * Remove fields no longer needed by new engine - * Trim sync events in a separate task - -v5.1.1 ----------- - * Stop writing legacy engine fields and make them nullable - * Remove no longer used send_broadcast_task and other unused sending code - * Squash migrations into previously added dummy migrations - -v5.1.0 ----------- - * Populate account sid and auth token on twilio callers when added - * Disable legacy IVR tasks - -v5.0.9 ----------- - * Add dummy migrations for all migrations to be created by squashing - -v5.0.8 ----------- - * Update recommended versions in README - * Fix API runs serializer when run doesn't have category (i.e. 
from save_run_result action) - * Update to latest floweditor - * Update search parser to convert timestamps into UTC - -v5.0.7 ----------- - * Force a save when migrating flows - -v5.0.6 ----------- - * Show search error if input is not a date - * Group being imported into should be in state=INITIALIZING whilst being populated, and hide such groups in the UI - * Only add initially changed files in post-commit hook - * Fix to make sure the initial form data is properly shown on signup - -v5.0.5 ----------- - * sync whatsapp templates with unsupported languages, show them as such - -v5.0.4 ----------- - * Update to floweditor v1.5.15 - * Add pagination to outbox - * Fix import of contact field when field exists with same name but different key - * Fix (old) Mac Excel dates in imports - -v5.0.3 ----------- - * Update flow editor to 1.5.14 - -v5.0.2 ----------- - * Remove reference to webhook API page which no longer exists - * Update to flow-editor 1.5.12 - * Update some LS libs for security - * Tweaks to migrate_to_version_11_1 to handle "base" as a lang key - * Tweak old flow migrations to allow missing webhook_action and null ruleset labels - -v5.0.1 ----------- - * Fix max length for WA claim facebook_access_token - * Fix WhatsApp number formatting on contact page, add icon - -v5.0.0 ----------- - * add validation of localized messages to Travis - -v4.27.3 ----------- - * Make contact.is_test nullable - * Migration to remove orphaned schedules and changes to prevent creating them in future - * Migration to merge path counts from rules which are merged into a single exit in new engine - -v4.27.2 ----------- - * fix broadcast API test - -v4.27.1 ----------- - * temporarily increase throttling on broadcasts endpoint - -v4.27.0 ----------- - * Cleanup webhook fields left on Org - * Stop checking flow_server_enabled and remove support for editing it - -v4.26.1 ----------- - * Remove no longer used check_campaigns_task - -v4.26.0 ----------- - * Remove handling of incoming messages, channel events and campaigns.. 
all of which is now handled by mailroom - -v4.25.0 ----------- - * Add sentry error to handle_event_task as it shouldn't be handling anything - * Remove processing of timeouts which is now handled by mailroom - * Start broadcast mailroom tasks with HIGH_PRIORITY - * Fix EX settings page load - * Migration to convert any remaining orgs to use mailroom - * Fix broken links to webhook docs - * Simplify WebHookEvent model - -v4.23.3 ----------- - * Send broadcasts through mailroom - * Add org name in the email subject for exports - * Add org name in export filename - -v4.24.0 ----------- - * Add org name in the export email subject and filename - * Update flow editor to 1.5.9 - * Remove functionality for handling legacy surveyor submissions - -v4.23.1 ----------- - * Make exported fields match goflow representation and add .as_export_ref() to exportable classes - * Update to latest floweditor v1.5.5 - * Persist group and field definitions in exports - * Add support for SignalWire (https://signalwire.com) for SMS and IVR - -v4.23.0 ----------- - * Save channel and message label dependencies on flows - -v4.22.63 ----------- - * Update to latest floweditor v1.5.5 - * Allow switching between editors - * Update Django to version 2.1.9 - -v4.22.62 ----------- - * add US/ timezones for clicksend as well - -v4.22.61 ----------- - * add clicksend channel type - -v4.22.60 ----------- - * Update flow editor to 1.5.4 - * Allow imports and exports of v13 flows - -v4.22.55 ----------- - * Enable export of new flows - * Update Nexmo supported countries list - -v4.22.54 ----------- - * rename migration, better printing - -v4.22.53 ----------- - * add migration to repopulate metadata for all flows - -v4.22.52 ----------- - * Expose result specs in flow metadata on flows API endpoint - * Use Temba JSON adapter when reading JSON data from DB - * Don't update TwiML channel when claiming it - * Use most recent topup for credit transfers between orgs - -v4.22.51 ----------- - * Update to flow-editor 1.5.3 - -v4.22.50 ----------- - * Update to floweditor v1.5.2 - -v4.22.49 ----------- - * Only do mailroom validation on new flows - -v4.22.48 ----------- - * Fix 11.12 migration and importing flows when flow contains a reference to a channel in a different org - * Make WhatsApp endpoint configurable, either FB or self-hosted - -v4.22.47 ----------- - * tweak to WA language mapping - -v4.22.46 ----------- - * add hormuud channel type - * newest editor - * update invitation secret when user is re-invited - -v4.22.45 ----------- - * Tweak compress for vendor - -v4.22.44 ----------- - * Update to flow editor 1.4.18 - * Add mailroom endpoints for functions, tweak styles for selection - * Honor is_active when creating contact fields - * Cache busting for flow editor - -v4.22.43 ----------- - * Update flow editor to 1.4.17 - * Warn users starting a flow on a WhatsApp channel that they should use templates - -v4.22.42 ----------- - * add page to view synched WhatsApp templates for a channel - -v4.22.41 ----------- - * Update flow editor to 1.4.16 - * View absolute attachments in old editor - -v4.22.40 ----------- - * Update editor to 1.4.14 - -v4.22.39 ----------- - * latest editor - -v4.22.38 ----------- - * update defs with db values both when writing and reading - * remove clearing of external ids for messages - -v4.22.37 ----------- - * Update to flow-editor 1.4.12 - * Remove footer gap on new editor - -v4.22.36 ----------- - * allow Alpha users to build flows in new editor - * don't use RuleSets in 
figuring results, exports, categories - -v4.22.28 ----------- - * Adjust `!=` search operator to include unset data - * Remove broadcast recipients table - * IMPORTANT * You must make sure that all purged broadcasts have been archived using - rp-archiver v1.0.2 before deploying this version of RapidPro - -v4.22.27 ----------- - * styling tweaks to contacts page - -v4.22.26 ----------- - * Always show featured ContactFields on Contact.read page - * Do not migrate ruleset with label null and action msg text null - -v4.22.25 ----------- - * only show pagination warning when we have more than 10k results - -v4.22.24 ----------- - * support != search operator - -v4.22.23 ----------- - * simplify squashing of squashable models - * show a notification when users open the last page of the search - * update `modified_on` once msgs export is finished - -v4.22.22 ----------- - * Fix issue with pagination when editing custom fields - -v4.22.21 ----------- - * Add new page for contact field management - -v4.22.20 ----------- - * add management command to reactivate fb channels - -v4.22.19 ----------- - * api for templates, add access token and fb user id to claim, sync with facebook endpoint - -v4.22.18 ----------- - * fix recalculating event fires for fields when that field is created_on - -v4.22.17 ----------- - * Don't overwrite show_in_table flag on contact import - * Prevent updates of contact field labels when adding a field to a flow - * Add migration to populate results and waiting_exit_uuids in Flow.metadata - -v4.22.15 ----------- - * Do not immediately expire flow when updating expirations (leave that to mailroom) - * Fix creation of duplicate boundary aliases - * Add org lock for users to deal with simultaneous updates of org users - * Add results and waiting_exit_uuids to flow metadata and start populating on Flow.update - -v4.22.14 ----------- - * CreateSubOrg needs to be non-atomic as well as it creates flows which need to be validated - * Remove unused download view - -v4.22.13 ----------- - * allow blank pack, update permissions - -v4.22.12 ----------- - * remove APK read view, only have update - * allow setting pack number - -v4.22.11 ----------- - * Add APK app and new Android claiming pipeline for Android Relayer - -v4.22.10 ----------- - * Use output of flow validation in mailroom to set flow dependencies - * Make message_actions.json API endpoint support partial updates - * Log to librato only pending messages older than a minute - -v4.22.6 ----------- - * Add Viber Welcome Message event type and config - * More customer support service buttons - -v4.22.5 ----------- - * queue incoming messages and incoming calls from relayer to mailroom - -v4.22.4 ----------- - * Temporarily disable flow validation until we can fix it for new orgs - -v4.22.3 ----------- - * Lazily create any dependent objects when we save - * MAILROOM_URL in settings.py.dev should default to http://localhost:8090 - * Call to mailroom to validate a flow before saving a new definition (and fix invalid flows in our tests) - -v4.22.2 ----------- - * Fix schedule next fire calculation bug when schedule is greater than number of days - * Fix to allow archiving flow for removed (inactive) campaign events - * Strip resthook slug during creation - * Ignore request from old android clients using GCM - -v4.22.1 ----------- - * Increase the schedule broadcast text max length to be consistent on the form - -v4.22.0 ----------- - * Fix case of single node flow with invalid channel reference - * Remove ChannelConnection.created_by 
and ChannelConnection.is_active - * Fix flow export results to include results from replaced rulesets - -v4.21.15 ----------- - * correct exclusion - -v4.21.14 ----------- - * Don't requeue flow server enabled msgs - * Exit sessions in bulk exit, ignore mailroom flow starts - -v4.21.13 ----------- - * Fix import with invalid channel reference - * Add flow migration to remove actions with invalid channel reference - -v4.21.12 ----------- - * improve simulator for goflow simulation - -v4.21.11 ----------- - * work around JS split to show simulator images - -v4.21.10 ----------- - * display attachments that are just 'image:' - -v4.21.9 ----------- - * simulator tweaks - * show Django warning if mailroom URL not configured - -v4.21.8 ----------- - * make sure we save flow_server_enabled in initialize - -v4.21.7 ----------- - * Update status demo view to match the current webhook posted data - * Remove all remaining reads of contact.is_test - -v4.21.6 ----------- - * Use pretty datetime on contact page for upcoming events - -v4.21.5 ----------- - * Replace final index which references contact.is_test - * Fix labels remap on flow import - -v4.21.4 ----------- - * All new orgs flow server enabled - * Fallback to org domain when no channel domain set - -v4.21.3 ----------- - * Remove all remaining checks of is_test, except where used in queries - * Update contact indexes to not include is_test - * Prevent users from updating dynamic groups if query is invalid - * Update Python module dependencies - -v4.21.2 ----------- - * set country code on test channel - -v4.21.1 ----------- - * do not log errors for more common exceptions - -v4.21.0 ----------- - * Include fake channel asset when simulating - * Add test for event retrying, fix out of date model - * Stop checking contact.is_test in db triggers - -v4.20.1 ----------- - * Remove unused fields on webhookevent - * Default page title when contact has no name or URN (e.g. 
a surveyor contact) - -v4.19.7 ----------- - * fix simulator to allow fields with empty value - * remove remaining usages of test contacts for testing - -v4.19.6 ----------- - * add incoming_extra flow to mailroom test - * fix for test contact deletion migration - -v4.19.5 ----------- - * pass extra to mailroom start task - -v4.19.4 ----------- - * Support audio/mp4 as playable audio - * Add migration to remove test contacts - -v4.19.3 ----------- - * Ensure scheduled triggers start flows in mailroom if enabled - -v4.19.2 ----------- - * remap incoming ivr endpoints for Twilio channels when enabling flow server - * interrupt flow runs when enabling flow server - * add enable_flow_server method to org, call in org update view - -v4.19.1 ----------- - * Scope API throttling by org and user - * Add export link on campaign read page - * Fix SMTP server config to percentage encode slashes - -v4.19.0 ----------- - * Add session_type field on FlowSession - * Use provided flow definition when simulating if provided - * Remove USSD app completely - * Adjust broadcast status to API endpoint - * Remove legacy (non-mailroom) simulation - -v4.18.0 ----------- - * Make ChannelConnection.is_active nullable so it can be eventually removed - * Replace traceback.print_exc() with logger.error - * Make sure contacts ids are iterable when starting a flow - * Remove USSD proxy model - -v4.17.0 ----------- - * Use URL kwargs for channel logs list to pass the channel uuid - * Fix message campaign events on normal flows not being skipped - * Default to month first date format for US timezones - * Make Contact.created_by nullable - * Fix to prevent campaign events from creating empty translations - * Use new editor wrapper to embed instead of building - * Remove USSD functionality from engine - -v4.16.15 ----------- - * Fix Stripe integration - -v4.16.14 ----------- - * fix webhook bodies to be json - -v4.16.13 ----------- - * better request logging for webhook results - -v4.16.12 ----------- - * further simplification of webhook result model, add new read and list pages - -v4.16.11 ----------- - * add org field to webhook results - -v4.16.10 ----------- - * Add surveyor content in mailroom_db command - * Fix flows with missing flow_type - * Update more Python dependencies - * Prevent flows of one modality from starting subflows of a different modality - -v4.16.8 ----------- - * Add support for Movile/Wavy channels - * Switch to codecov for code coverage - * Allow overriding brand domain via env - * Add mailroom_db management command for mailroom tests - * Start flow_server_enabled ivr flows in mailroom - * Remove legacy channel sending code - * Remove flow dependencies when deactivating USSD flows - * Migrations to deactivate USSD content - -v4.16.5 ----------- - * Fix quick replies in simulator - -v4.16.4 ----------- - * More tweaks to Bongolive channel - * Use mailroom simulation for IVR and Surveyor flows - * Add a way to see all runs on flow results runs table - -v4.16.3 ----------- - * Simplify generation of upload URLs with new STORAGE_URL setting - -v4.16.2 ----------- - * Switch BL channels used API - * Fix rendering of attachments for mailroom simulation - * Update black to version 18.9b0 - -v4.16.0 ----------- - * Fix flow_entered event name in simulator - * Make created_by, modified_by on FlowStart nullable, add connections M2M on FlowStart - * Rename ChannelSession to ChannelConnection - -v4.15.2 ----------- - * Fix for flow dependency migration - * Fix rendering of single digit hours in pretty_datetime 
tag - * Use mailroom for flow migration instead of goflow - * Add support for Bongo Live channel type - -v4.15.1 ----------- - * Include default country in serialized environments used for simulation - * Add short_datetime and pretty_datetime tags which format based on org settings - * Prevent users from choosing flow they are editing in some cases - -v4.15.0 ----------- - * Fix nexmo claim - * Tweak 11.7 migration to not blow up if webhook action has empty URL - * Bump module minor versions and remove unused modules - * Remove ChannelSession.modified_by - -v4.14.1 ----------- - * Make older flow migrations more fault tolerant - * Tweaks to migrate_flows command to make error reporting more useful - * Add flow migration to fix duplicate rule UUIDs - * Update python-telegram-bot to 11.1.0 - * Update nexmo to 2.3.0 - -v4.14.0 ----------- - * Fix recent messages rollover with 0 messages - * Use flowserver only for flow migration - * Make created_by and modified_by optional on channel session - -v4.13.2 ----------- - * create empty revisions for empty flows - * proper handling of empty errors on index page - * fix error for policy read URL failing - * add quick replies to mailroom simulator - -v4.13.1 ----------- - * populate simulator environment for triggers and resumes - * honour Flow.is_active on the Web view - * fix android channel release to not throw if no FCM ID - * add Play Mobile aggregator - -v4.13.0 ----------- - * Add index for fast Android channel fetch by last seen - * Remove gcm_id field - * No messages sheet for flow results export on anon orgs - * Add periodic task to sync channels we have not seen for a while - * Add wait_started_on field to flow session - -v4.12.6 ----------- - * Remove flow server trialling - * Replace tab characters for GSM7 - * Use mailroom on messaging flows for simulation - * Raise ValidationError for ContactFields with null chars - * upgrade to Django 2.1 - -v4.12.5 ----------- - * Make sure Flow.update clears prefetched nodes after potentially deleting them - -v4.12.4 ----------- - * Fix Flow.update not deleting nodes properly when they change type - -v4.12.3 ----------- - * Add try/except block on FCM sync - * Issue #828, remove numbers replace - -v4.12.2 ----------- - * Don't show queued scheduled broadcasts in outbox - * Prevent deleting groups with active campaigns - * Activate support for media attachment for Twitter channels - * Remove ability to create webhook actions in editor - * Add flow migration to replace webhook actions with rulesets - -v4.12.1 ----------- - * Fix importing campaign events based on created_on - * Fix event fires creation for immutable fields - * Remove WA status endpoint - * Fix IVR runs expiration date initialization - * Add UUID field to org - -v4.11.7 ----------- - * Interrupt old IVR calls and related flow sessions - * Move webhook docs button from the token view to the webhook view - -v4.11.6 ----------- - * Faster squashing - * Fix EX bulk sender form fields - -v4.11.5 ----------- - * simulate flow_server_enabled flows in mailroom - -v4.11.3 ----------- - * Add session log links to contact history for staff users - * Hide old webhook config page if not yet set - -v4.11.2 ----------- - * Fix passing false/true to archived param of flows API endpoint - -v4.11.1 ----------- - * Turn on the attachment support for VP channels - * Tweak 11.6 flow migration so that we remap groups, but never create them - * Flows API endpoint should support filtering by archived and type - * Log how many flow sessions are deleted and the 
time taken - * Turn on the attachment support for WA channels - * Adjust UI for adding quick replies and attachment in random order - -v4.11.0 ----------- - * Add index for fetching waiting sessions by contact - * Ensure test_db users have same username and email - * Add index to FlowSession.ended_on - * Make FlowSession.created_on non-null - * Add warning class to skipped campaigns event fire on contact history - * Add fired_result field to campaign event fires - -v4.10.9 ----------- - * Log and fail calls that cannot be started - * Allow contact.created_on in flows, init new event - -v4.10.8 ----------- - * Deactivate events when updating campaigns - * Less aggressive event fire recreation - * Use SMTP SERVER org config and migrate old config keys - -v4.10.4 ----------- - * Retry failed IVR calls - -v4.10.3 ----------- - * Show all split types on run results, use elastic for searching - -v4.10.2 ----------- - * Flow migration for mismatched group uuids in existing flows - * Remap group uuids on flow import - * Migration to backfill FlowSession.created_on / ended_on - -v4.10.1 ----------- - * Add config to specify content that should be present in the response of the request, and if it's missing mark the msg as failed - * Allow campaign events to be skipped if contacts already active in flows - -v4.10.0 ----------- - * Add FlowRun.parent_uuid - * Add FlowSession.timeout_on - * Create new flows with flow_server_enabled when org is enabled - * Add flow-server-enabled to org, don't deal with flow server enabled timeouts or expirations on rapidpro - -v4.9.2 ----------- - * Fix flowserver resume tests by including modified_on on runs sent to goflow - -v4.9.1 ----------- - * Don't set preferred channels if they can't send or call - * Don't assume events from goflow have step_uuid - * Add indexes for flow node and category count squashing - -v4.9.0 ----------- - * Delete event fires in bulk for inactive events - * Fix using contact language for categories when it's not a valid org language - * Fix translation of quick replies - * Add FlowSession.current_flow and start populating - * Refresh contacts list page after managing fields - * Update to latest goflow (no more caller events, resumes, etc) - * Fix flow results export to read old archive format - * Batch event fires by event ID and not by flow ID - * Make campaign events immutable - -v4.8.1 ----------- - * Add novo channel - -v4.8.0 ----------- - * Remove trialing of campaign events - * Remove no longer used ruleset_analytics.haml - * Expose @contact.created_on in expressions - * Make Contact.modified_by nullable and stop writing to it - * Optimize group releases - * Add created_on/ended_on to FlowSession - -v4.7.0 ----------- - * Bump Smartmin and Django versions - * Expose @contact.created_on in expressions - * Make Contact.modified_by nullable and stop writing to it - -v4.6.0 ----------- - * Latest goflow - -v4.5.2 ----------- - * Add config for deduping messages - * Add created_on/ended_on to FlowSession - * Update to latest goflow (event changes) - * Do not delete campaign events, deactivate them - * Do not delete runs when deleting a flow - * Fix campaign event deletion for system flows - -v4.5.1 ----------- - * Use constants for queue names and switch single contact flow starts to use the handler queue - * Raise ValidationError if flow.extra is not valid JSON - * Defer group.release in a background task - * Fix saving dynamic groups by reverting back to escapejs for contact group query on dialog - -v4.5.0 ----------- - * Add Stopped event to 
message history and unknown/unsupported events - * Switch result value to be status code from webhook rulesets, save body as @extra. and migrate result references to that - -v4.4.20 ----------- - * Fix channel selection for sending to TEL_SCHEME - * Add campaigns to all test orgs for make_db - * Correctly embed JS in templates - * Escape data before using `mark_safe` - -v4.4.19 ----------- - * Fix validating URNField when input isn't a string - -v4.4.18 ----------- - * Fix incorrect units in webhook_stats - * Result input should always be a string - -v4.4.17 ----------- - * Don't do duplicate message check for surveyor messages which are already SENT - * Update to goflow 0.15.1 - * Update Location URLs to work with GADM IDs - * Fix potential XSS issue: embed script only if `View.refresh` is set - -v4.4.16 ----------- - * Fix IVR simulation - -v4.4.15 ----------- - * Fix importing with Created On columns - * Validate URNs during import - * Classify flow server trials as simple if they don't have subflows etc - * Use latest goflow for testing - -v4.4.14 ----------- - * Enable import of GADM data using import_geojson - -v4.4.13 ----------- - * Defer to mailroom for processing event fires for flows that are flowserver enabled - * Tweaks to comparing events during flow server trials - * Fix saved operand for group tests on anon orgs - -v4.4.12 ----------- - * Add step URN editor completions - * Add name to the channels shown on the flow editor - * Don't zero pad anon ids in context - * Update to latest expressions - -v4.4.11 ----------- - * Ensure API v1 writes are atomic - * JSONFields should use our JSON encoder - * Use authenticated user for events on Org.signup - * Trial shouldn't blow up if run has no events - * Add urn to step/message context and make urn scheme accessible for anon org - * Get rid of Flow.FLOW - -v4.4.8 ----------- - * Don't trial flow starts from triggers - * Fix messages from non-interactive subflows being added to their parent run - * Setup user tracking before creating an Org - * Migrate flows during flowserver trials with collapse_exits=false to keep paths exactly the same - * Input for a webhook result test should be a single request - * Migration to update F type flows to M - -v4.4.7 ----------- - * Enforce validation on OrgSignup and OrgGrant forms - * Cleanup encoding of datetimes in JSON - * New flows should be created with type M and rename constants for clarity - -v4.4.6 ----------- - * Fix updating dynamic groups on contact update from the UI - * Make editor agnostic to F/M flow types - -v4.4.5 ----------- - * Remove mage functionality - * Fix Twilio number searching - -v4.4.2 ----------- - * Use SystemContactFields for Dynamic Groups - * Add our own json module for loads, dumps, always preserve decimals and ordering - * Replace reads of Flow.flow_type=MESSAGE with Flow.is_system=True - * Migration to populate Flow.is_system based on flow_type - -v4.4.0 ----------- - * Fix intercom ResourceNotFound on Org.Signup - * Remove follow triggers and channel events - * Add Flow.is_system and start populating for new campaign event single message flows - -v4.3.8 ----------- - * Data migration to deactivate all old style Twitter channels - * Update Nexmo client - -v4.3.4 ----------- - * Increase IVR logging verbosity - * Trial all campaign message flows in flowserver - * Tweak android recommendation - -v4.3.3 ----------- - * Run Table should only exclude the referenced run, and include greater IDs - * Raise validation error when trying to action inactive contacts over API 
- * Remove uservoice as a dependency - * Update versions of Celery, Postgis, Nexmo, Twilio - * Fix Python 3.7 issues - * Clear out archive org directory when full releasing orgs - -v4.3.2 ----------- - * Update expressions library to get EPOCH() function - -v4.3.1 ----------- - * Update to Django 2.0 - * Update postgres adapter to use psycopg2-binary - -v4.3.0 ----------- - * Wrap asset responses in a results object - * Use trigger type of campaign when starting campaign event flows in flowserver - * Fix count for blocktrans to not use string from intcomma - * Use audio/mp4 content type for m4a files - -v4.2.4 ----------- - * Update to latest goflow and enable asset caching - * Actually fix uploading mp4 files - -v4.2.2 ----------- - * Show only user fields when updating field values for a contact - * Fix MIME type for M4A files - * Allow test_db command to work without having ES installed - -v4.2.1 ----------- - * Ignore search exceptions in omnibox - * Actually enable users to use system contact fields in campaign events - -v4.2.0 ----------- - * Enable users to choose 'system fields' like created_on for campaign events - -v4.1.0 ----------- - * Management command to recalculate node counts - * Fix run path triggers when paths are trimmed - * Allow file overwrite for public S3 uploads - -v4.0.3 ----------- - * Handle cases when surveyor submits run with deleted action set - * Document modified_on on our API endpoint - * Use ElasticSearch for the omnibox widget - -v4.0.2 ----------- - * fix count of suborgs after org deletion - -v4.0.1 ----------- - * remove group settings call for WhatsApp which is no longer supported - * easier way to service flows for CS reps - -v4.0.0 ----------- - * Squash all migrations - -v3.0.1000 ----------- - * fix display of archives formax on home page - -v3.0.999 ----------- - * Fix chatbase font icon name - * Add encoding config to EX channel type - * Show archive link and information on org page - -v3.0.449 ----------- - * Improve error message when saving surveyor run fails - * Allow surveyor submissions to match rules on old revisions - * Fix bug in msg export from archives - -v3.0.448 ----------- - * Support audio attachments in all the audio formats that we can play - * Add name and input to runs API v2 endpoint - * Update InGroup test to match latest goflow - * Expose resthooks over the assets endpoint and update logic to match new engine - * Support messages export from archives - -v3.0.447 ----------- - * Configure Celery to discover Wechat and Whatsapp tasks - * Add Rwanda and Nigeria to AT claim form options - * Extend timeout for archives links to 24h - * Add created_on to the contact export - -v3.0.446 ----------- - * Use constants for max contact fields and max group membership columns - * Tweaks to Twitter activity claiming that deals with webhooks already being claimed, shows errors etc - * Rename form field to be consistent with the constants we use - * Writes only now use XLSLite, more coverage - * Limit number of groups for group memberships in results exports - * Switch message export to use XLSLite - * Fix default ACL value for S3 files - * Add WeChat (for beta users) - -v3.0.445 ----------- - * fix dupe sends in broadcast action - -v3.0.444 ----------- - * fix per credit calculation - -v3.0.443 ----------- - * two decimals for per credit costs, remove trailing 0s - -v3.0.442 ----------- - * Fix ContactField priority on filtered groups - * Update Django to version 1.11.14 - * Reenable group broadcasts - -v3.0.438 ----------- - * When 
comparing msg events in flowserver trials, make paths relative again - * Change VariableContactAction to create contacts even without URNs - * Fix import of ID columns from anon export - * Don't fail twilio channel releases if auth key is no longer valid - * Add UI messaging for archived data - -v3.0.437 ----------- - * Fix import of header ID from anon export - -v3.0.436 ----------- - * Fix supported scheme display lookup - * Move action log delete to flow run release - -v3.0.435 ----------- - * Fix group test operand when contact name is null - * Mention all Africa's Talking countries on claim page - * Warn user of columns to remove on import - * Release events properly on campaign import - * Add languages endpoint to asset server - -v3.0.434 ----------- - * Add option for two day run expiration - * Change group rulesets to use contact as operand same as new engine - * Fix reconstructing sessions for runs being trialled in the flowserver so that we include all session runs - -v3.0.433 ----------- - * Write boolean natively when exporting to xlsx - * Improve reporting of flow server errors during trials - * Clarify about contact import columns - * Update flow result exports to match recent changes to contact exports - -v3.0.432 ----------- - * Update modified_on on contacts that have their URN stolen - * Full releasing of orgs and users - -v3.0.431 ----------- - * Set exit_uuid at end of path when run completes - * Make twitter activity API the default twitter channel type - * Add Nigeria and Rwanda to AT supported countries - * Don't exclude result input from flowserver trial result comparisons - * Use operand rather than msg text for result input - * Remove reporting to sentry when @flow.foo.text doesn't equal @step.text - * Add flow migration to replace @flow.foo.text expressions on non-waiting rulesets - -v3.0.430 ----------- - * Fix message flow updating - -v3.0.429 ----------- - * Remove org.is_purgeable - * Fix format of archived run json to match latest rp-archiver - * Fix checking of result.text values in the context - * Import/Export column headers with type prefixes - * Add groups membership to contacts exports - * Retry calls that are in IVRCall.RETRY_CALL - * Retry IVR outgoing calls if contact did not answer - -v3.0.428 ----------- - * Add FlowRun.modified_on to results exports - * Change how we select archives for use in run exports to avoid race conditions - * Report to sentry when @flow.foo.text doesn't match @step.text - -v3.0.427 ----------- - * Release webhook events on run release - * Fetch run results from archives when exporting results - * Don't create action logs for non-test contacts - -v3.0.426 ----------- - * Migrations for FK protects, including all SmartModels - * Update to latest xlsxlite to fix exporting date fields - * Remove merged runs sheet from results exports - * Modified the key used in the transferto API call - -v3.0.425 ----------- - * Enable burst sms type - -v3.0.424 ----------- - * add burst sms channel type (Australia and New Zealand) - -v3.0.423 ----------- - * trim event fires every 15 minutes - -v3.0.422 ----------- - * Trim event fires older than a certain age - * More consistent name of date field on archive model - * Remove no longer needed functionality for runs that don't have child_context/parent_context set - -v3.0.421 ----------- - * Degroup contacts on deactivate - -v3.0.420 ----------- - * release sessions on reclaimed urns - -v3.0.419 ----------- - * special case deleted scheme in urn parsing - * release urn messages when releasing 
a contact - * add delete reason to run - -v3.0.418 ----------- - * Clear child run parent reference when releasing parent - * Make sync events release their alerts - * Release sessions, anonymize urns - -v3.0.417 ----------- - * add protect to contacts and flows, you can fake the migrations in this release - -v3.0.416 ----------- - * add deletion_date, use full path as link name - * add unique constraint to disallow dupe archives - -v3.0.415 ----------- - * add needs_deletion field, remove is_purged - -v3.0.414 ----------- - * Set run.child_context when child has no waits - * Use latest openpyxl and log the errors to sentry - * Don't blow up if trialled run has no events - * Allow editors to see archives / api - * Migration to backfill run parent_context and child_context - -v3.0.412 ----------- - * Fix archive filter test - * Include id when serializing contacts for goflow - -v3.0.411 ----------- - * Show when build failed because black was not executed - * Fix calculation of low threshold for credits to consider only the top with unused credits - * Allow flows with subflows to be trialled in the flowserver - * Create webhook mocks for use in flowserver trials from webhook results - * Enable Archive list API endpoint - -v3.0.410 ----------- - * Remove purging, add release with delete_reason - * Set parent_context in Flow.start and use it in FlowRun.build_expressions_context if available - * Add is_archived counts for LabelCounts and SystemLabelCounts, update triggers - -v3.0.409 ----------- - * Remove explicit use of uservoice - * Use step_uuids for recent message calculation - -v3.0.408 ----------- - * Format code with blackify - * Add management commands to update consent status and org membership - * Update to latest goflow to fix tests - * Fix 'raise None' in migration and make flow server trial period be 15 seconds - * Fix the campaign events fields to be datetime fields - * Move flow server stuff from utils.goflow to flows.server - * Add messangi channel type - -v3.0.407 ----------- - * Reenable requiring policy consent - * Allow msgs endpoint to return ALL messages for an org sorted by created_on - * Return error message if non-existent asset requested from assets endpoint - * If contact sends message whilst being started in a flow, don't blow up - * Remove option to have a flow never expire, migrate current flows with never to 30 days instead - * Request the user to fill the LINE channel ID and channel name on the claim form - -v3.0.406 ----------- - * Fix logging events to intercom - -v3.0.405 ----------- - * Migration to remove FlowStep - -v3.0.404 ----------- - * remove old privacy page in favor of new policy app - * use python3 `super` method - * migration to backfill step UUIDs on recent runs - -v3.0.403 ----------- - * tweaks to add_analytics users - -v3.0.402 ----------- - * add native intercom support, add management command to update all users - -v3.0.401 ----------- - * Fix quick replies in simulator - * Lower the min length for Facebook page access token - * Update Facebook claim to ask for Page ID and Page name from the user - * Add new policies and consent app - * Fix another migration that adds a field and writes to it in same transaction - * Add step UUID fields to FlowPathRecentRun and update trigger on run paths to start populating them - -v3.0.400 ----------- - * Don't create flow steps - * Remove remaining usages of six - -v3.0.399 ----------- - * Drop no longer used FlowRun.message_ids field - * Don't allow nested flowserver trials - * Fix migrations which can 
lead to locks because they add a field and populate it in same transaction - * Remove a lot of six stuff - * Use bulk_create's returned msgs instead of forcing created_on to be same for batches of messages created by Broadcast.send - * Use sent_on for incoming messages' real world time - * Don't require steps for flow resumptions - -v3.0.398 ----------- - * Add period, rollup fields to archive - -v3.0.397 ----------- - * Stop writing .recipients when sending broadcasts as this is only needed for purged broadcasts - * Rework run_audit command to check JSON fields and not worry about steps - * Replace json_date_to_datetime with iso8601.parse_date - * Stepless surveyor runs - -v3.0.396 ----------- - * Use run path instead of steps to recalculate run expirations - * Stop writing to FlowRun.message_ids - -v3.0.395 ----------- - * Change FlowRun.get_last_msg to use message events instead of FlowRun.message_ids - * Stop saving message associations with steps - -v3.0.393 ----------- - * Drop values_value - -v3.0.392 ----------- - * Remove broadcast purging - -v3.0.391 ----------- - * remove reference to nyaruka for tracking users - * fix test decoration to work when no flow server configured - -v3.0.390 ----------- - * Disable webhook calls during flowserver trials - * Use FlowRun.events for recent messages rollovers - -v3.0.389 ----------- - * add archive model, migrations - -v3.0.388 ----------- - * Make ContactField header clickable when sorting - * Add first python2 incompatible code change - * Add contact groups sheet on contact exports - * Remove contact export as CSV - * Update to latest goflow - * Fix test_db contact fields serialization - -v3.0.387 ----------- - * fix flowstarts migration - -v3.0.386 ----------- - * update start contact migration to work with malformed extra - -v3.0.384 ----------- - * fix not selecting contact id from ES in canary task - -v3.0.383 ----------- - * add canary task for elasticsearch - * record metrics about flowserver trial to librato - * allow sorting of contact fields via dragging in manage dialog - -v3.0.382 ----------- - * rename flow migration - -v3.0.381 ----------- - * limit number of flows exited at once, order by expired_on to encourage index - * remove python 2.7 build target in travis - * start flow starts in the flows queue vs our global celery one - * add flow start count model to track # of runs in a flow start - * Always use channel.name for channel assets - -v3.0.380 ----------- - * update to latest goflow to get location support - * better output logs for goflow differences - -v3.0.379 ----------- - * add v2 editor through /v2 command in simulator - -v3.0.378 ----------- - * get all possible existing Twilio numbers on the Twilio account - * reenable group sends - * remove Value model usage, Contact.search - -v3.0.377 ----------- - * do not allow dupe broadcasts to groups - * Use ElasticSearch to export contacts and create dynamic groups - * remove celery super auto scaler - * update whatsapp activation by setting rate limits using new endpoints - * fix incorrect keys for tokens and account sids for twiml apps - * add ability to test flow results against goflow - -v3.0.376 ----------- - * remove celery super auto scaler since we don't use it anywhere - * update whatsapp activation by setting rate limits using new endpoints - * fix incorrect keys for tokens and account sids for twiml apps - * add admin command to help audit ES and DB discrepancies - -v3.0.375 ----------- - * update whatsapp for new API - * new index on 
contacts_contact.fields optimized for space - -v3.0.374 ----------- - * allow reading, just not writing of sends with groups - * remove old searching from contact views - -v3.0.373 ----------- - * optimize group views - * don't allow sends to groups to be imported or copied - * remove normal junebug, keep only junebug ussd - * fix isset/~isset, sort by 'modified_on_mu' in ES - * use ES to search for contacts - -v3.0.372 ----------- - * remap sms and status Twilio urls, log people still calling old ones - * fix to display Export buttons on sent msgs folder and failed msgs folder - * use message events in run.events for results exports instead of run.message_ids - -v3.0.371 ----------- - * add twilio messaging handling back in - -v3.0.370 ----------- - * remove logging of base handler being called - -v3.0.369 ----------- - * rename contact field types of decimal to number - * finalize contact imports so that updated contacts have modified_on outside transaction - * try to fetch IVR recordings for up to a minute before giving up - * remove handling and sending code for all channel types (except twitter and junebug) - -v3.0.368 ----------- - * Fewer sentry errors from ES searching - * Don't assume messages have a UUID in FlowRun.add_messages - -v3.0.367 ----------- - * allow up to two minutes for elastic search lag - -v3.0.366 ----------- - * fix empty queryset case for ES comparison - -v3.0.365 ----------- - * chill the f out with sentry if the first contact in our queryset is less than 30 seconds old - * fix duplicate messages when searching on msgs whose contacts have more than one urn - -v3.0.364 ----------- - * fix environment variable for elastic search, catch all exceptions - -v3.0.363 ----------- - * Add Elastic searching for contacts, for now only validating that results through ES are the same as through postgres searches - -v3.0.361 ----------- - * Migrate Dart/Hub9 Contact urns and channels to support ext schemes - -v3.0.360 ----------- - * Use more efficient queries for check channels task - * Fix Location geojson import - -v3.0.359 ----------- - * Add API endpoint to view failed messages - -v3.0.358 ----------- - * Allow filtering by uuid on runs API endpoint, and include run uuid in webhooks - * Fix blocktrans failing on label count - -v3.0.357 ----------- - * Add linear backdown for our refresh rate on inbox pages - -v3.0.356 ----------- - * Do not log MageHandler calls - * Serialize contact field label as name instead - -v3.0.355 ----------- - * Use force_text on uuids read from redis - * Log errors for any channel handler methods - -v3.0.354 ----------- - * Set placeholder msg.id = 0 - * Fix comparison when price is None - -v3.0.353 ----------- - * Evaluate contact field with no value as False - -v3.0.352 ----------- - * Update to Facebook graph api v2.12 - -v3.0.351 ----------- - * Support plain ISO dates (not just datetimes) - -v3.0.350 ----------- - * Swallow exceptions encountered when parsing, don't add to group - * Set placeholder msg.id = 0 - -v3.0.349 ----------- - * Deal with null state values in contact search evaluation - -v3.0.348 ----------- - * Fix off by one error in calculating best channel based on prefixes - * Reevaluate dynamic groups using local contact fields instead of SQL - -v3.0.347 ----------- - * Add modified_on index for elasticsearch - -v3.0.346 ----------- - * Don't start archived flows - * Don't show stale dates on campaign events - * Allow brands to configure flow types - * Remove group search from send to others action - * Fixes for test 
contact activity - -v3.0.345 ----------- - * Migration to backfill run.events and add step uuids to run.path - * Do the right thing when we are presented with NaN decimals - -v3.0.344 ----------- - * Use real JSONField for FlowRun.events - * Add FlowRun.events and start populating with msg events for new runs - * Serialize Contact.fields in test_db - * Update to latest goflow release - -v3.0.342 ----------- - * Fix for decimal values in JSON fields attribute - * Fix for not being able to change contact field types if campaign event inactive - -v3.0.341 ----------- - * Add if not exists to index creation for fields - * Last of Py3 compatibility changes - -v3.0.340 ----------- - * Use fields JSON field on Contact instead of Value table for all reading. - * Force campaign events to be based off of DateTime fields - * Migration to change all contact fields used in campaign events to DateTime - * Migration to add GIN index on Contact.fields - -v3.0.339 ----------- - * Remove leading and trailing spaces on location string before boundaries path query - * Require use of update_fields with Contact.save() - * Event time of contact_changed is when contact was modified - * Use latest goflow release - * Make special channel accessible during simulator use - -v3.0.338 ----------- - * Always serialize contact field datetime values in the org timezone - * Add migration for population of the contact field json - -v3.0.336 ----------- - * Update middlewares to Django defaults for security - * Add JSON fields to Contact, set in set_field - * backfill any null location paths, make not null, update import to set path, set other levels on fields when setting location - -v3.0.335 ----------- - * Allow groups when scheduling flows or triggers - * Fix configuration page URLs and use courier URLs - * Replace contact.channel in goflow serialization with a channel query param in each contact URN - * Serialize contact.group_uuids as groups with name and UUID - -v3.0.334 ----------- - * Add response to external ID to courier serialized msg if we have response to - * More Py3 migration work - * Remove broadcasting to groups from Send Message dialog - -v3.0.332 ----------- - * Do not delete RuleSets, only disconnect them from flows - -v3.0.331 ----------- - * Fix scoping for sim show/hide - -v3.0.330 ----------- - * Allow toggling of new engine on demand with /v2 command in simulator - -v3.0.329 ----------- - * Fix negative cache ttl for topups - -v3.0.328 ----------- - * Remove Vumi Type - * Remove custom autoscaler for Celery - * Implement Plivo without Plivo library - -v3.0.325 ----------- - * Build dynamic groups in background thread - * Dynamic Channel changes, use uuids in URLs, allow custom views - * Allow WhatsApp channels to refresh contacts manually - * Allow brands to specify includes for the document head - * Fix external claim page, rename auth_urn for courier - * Change VB channel type to be a dynamic channel - * Remove unused templates - -v3.0.324 ----------- - * Add ability to run select flows against a flowserver instance - -v3.0.323 ----------- - * Move JioChat access creation to channel task - * Use 'list()' on python3 dict iterators - * Use analytics-python===1.2.9, python3 compatible - * Fix using PlayAction in simulator and add tests - * Fix HasEmailTest to strip surrounding punctuation - * ContainsPhraseTest shouldn't blow up if test string is empty - * Use 'six' library for urlparse, urlencode - -v3.0.322 ----------- - * Unfreeze phonenumbers library so we always use latest - * Remove old Viber 
VI channel type - * Add config template for LN channel type - * Move configuration blurbs to channel types - * Move to use new custom model JSONAsTextField where appropriate - -v3.0.321 ----------- - * Fix quick-reply button in flow editor - -v3.0.320 ----------- - * Fix webhook rule as first step in run interpreting msg wrong - * Change mailto URN importing to use header 'mailto' and make 'email' always a field. Rename 'mailto' fields to 'email'. - -v3.0.319 ----------- - * Add ArabiaCell channel type - * Tweaks to Mtarget channel type - * Pathfix for highcharts - -v3.0.318 ----------- - * Add input to webhook payload - -v3.0.317 ----------- - * Remove support for legacy webhook payload format - * Fix org-choose redirects for brands - -v3.0.316 ----------- - * Remove stop endpoint for MT - -v3.0.315 ----------- - * Inactive flows should not be listed on the API endpoint - * Add Mtarget channel type - -v3.0.314 ----------- - * Add run dict to default webhook payload - -v3.0.313 ----------- - * have URNs resolve to dicts instead of just the display - * order transfer credit options by name - * show dashboard link even if org is chosen - -v3.0.312 ----------- - * include contact URN in webhook payload - -v3.0.311 ----------- - * Allow exporting results of archived flows - * Update Twitter Activity channels to work with latest beta changes - * Increase maximum attachment URL length to 2048 - * Tweak contact searching so that set/not-set conditions check the type specific column - * Migration to delete value decimal/datetime instances where string value is "None" - * Don't normalize nulls in @extra as "None" - * Clear timeouts for msgs which dont have credits assigned to them - * Simpler contact get_or_create method to lookup a contact by urn and channel - * Prevent updating name for existing contact when we receive a message - * Remove fuzzy matching for ContainsTest - -v3.0.310 ----------- - * Reimplement clickatell as a Courier only channel against new API - -v3.0.309 ----------- - * Use database trigger for inserting new recent run records - * Handle stop contact channel events - * Remove no longer used FlowPathRecentRun model - -v3.0.308 ----------- -'# Enter any comments for inclusion in the CHANGELOG on this revision below, you can use markdown - * Update date for webhook change on api docs - * Don't use flow steps for calculating test contact activity - -v3.0.307 ----------- - * Stop using FlowPathRecentMessage - -v3.0.306 ----------- - * Migration to convert recent messages to recent runs - -v3.0.305 ----------- - * Add new model for tracking recent runs - * Add dynamic group optimization for new contacts - -v3.0.304 ----------- - * Drop index on FlowStep.step_uuid as it's no longer needed - -v3.0.303 ----------- - * Still queue messages for sending when interrupted by a child - -v3.0.302 ----------- - * Use FlowRun.current_node_uuid for sending to contacts at a given flow node - -v3.0.301 ----------- - * Tweak process_message_task to not blow up if message doesn't exist - * Use FlowRun.message_ids for flow result exports - -v3.0.300 ----------- - * Use config secret instead of secret field on Channel - * Add tests for datetime contact API field update - -v3.0.299 ----------- - * Fix deleting resthooks - * Fix quick replies UI on Firefox - -v3.0.298 ----------- - * Process contact queue until there's a pending message or empty - * Make date parsing much stricter - * Migration to fix run results which were numeric but parsed as dates - * Use transaction when creating contact URN - * 
Add support for v2 webhooks - -v3.0.294 ----------- - * Fix run.path trigger to not blow up deleting old steps that don't have exit_uuids - * Define MACHINE_HOSTNAME for librato metrics - -v3.0.293 ----------- - * Fix handle_ruleset so we don't continue the run if a child has exited us - * Migration to backfill FlowRun.message_ids and .current_node_uuid (recommend faking and running manually) - -v3.0.292 ----------- - * Add support for 'direct' db connection - * Stop updating count and triggered on on triggers - * Add FlowRun.current_node_uuid and message_ids - * Catch IntegrityError and lookup again when creating contact URN - * Make sure we dont allow group chats in whatsapp - -v3.0.291 ----------- - * Ignore TMS callbacks - -v3.0.289 ----------- - * Stop writing values in flows to values_value - -v3.0.287 ----------- - * Performance improvements and simplications to flow result exports - * Add some extra options to webhook_stats - * Migration to convert old recent message records - -v3.0.286 ----------- - * Remove incomplete path counts - -v3.0.285 ----------- - * Migrate languages on campaign events - * Rework flow path count trigger to use exit_uuid and not record incomplete segments - -v3.0.282 ----------- - * Don't import contacts with unknown iso639-3 code - * Make angular bits less goofy for quick replies and webhooks - * Add is_active index on flowrun - * Don't disassociate channels from orgs when they're released - * Include language column in Contact export - -v3.0.281 ----------- - * Set tps for nexmo and whatsapp - * Dont overwrite name when receiving a message from a contact that already exists - * Flow start performance improvements - -v3.0.280 ----------- - * Parse ISO dates followed by a period - * Optimize batch flow starts - -v3.0.279 ----------- - * Update Nexmo channels to use new Courier URLs - * Store path on AdminBoundary for faster lookups - * Serialize metata for courier tasks (quick replies support) - * Add default manager to AdminBoundary which doesn't include geometry - -v3.0.278 ----------- - * Fixes to the ISO639-3 migration - * Add support for quick replies - -v3.0.277 ----------- - * Add flow migration for base_language in flow definitions - -v3.0.276 ----------- - * back down to generic override if not found with specific code - * Add esp-spa as exception - -v3.0.275 ----------- - * Fix language migrations - -v3.0.274 ----------- - * Fix serialization of 0 decimal values in API - * Add initial version of WhatsApp channel (simple messaging only) - * Migrate to iso639-3 language codes (from iso639-2) - * Remove indexes on Msg, FlowRun and FlowStep which we don't use - * Remove fields no longer used on org model - -v3.0.273 ----------- - * Don't blow up when a flow result doesn't have input - -v3.0.272 ----------- - * Fix parsing ISO dates with negative offsets - -v3.0.271 ----------- - * Serialize contact field values with org timezone - -v3.0.270 ----------- - * Load results and path from new JSON fields instead of step/value objects on API runs endpoint - -v3.0.269 ----------- - * Fix campaign export issue - * Disable legacy analytics page - * Change date constants and contact fields to use full/canonical format in expressions context - -v3.0.265 ----------- - * Fix not updating versions on import flows - * Require FlowRun saves to use update_fields - * Rework get_results to use FlowRun.results - * Don't allow users to save dynamic groups with 'id' or 'name' attributes - * Add flow version 11.0, create migration to update references to contact fields and 
flow fields - -v3.0.264 ----------- - * Show summary for non-waits on flow results - * Reduce number of queries during flow handling - -v3.0.263 ----------- - * Start campaigns in separate task - * Enable flow results graphs on flow result page - * Fix run table json parsing - * SuperAutoScaler! - -v3.0.262 ----------- - * Use string comparison to optimize temba_update_flowcategorycount - * Allow path counts to be read by node or exit - * SuperAutoscaler - * Fix inbox views so we don't look up channel logs for views that don't have them - * Add management command for analyzing webhook calls - * Change recent message fetching to work with either node UUID or exit UUID - -v3.0.261 ----------- - * Migrate revisions forward with rev version - * Limit scope of squashing so we can recover from giant unsquashed numbers - -v3.0.260 ----------- - * Make tests go through migration - * Set version number of system created flows - * Block saving old versions over new versions - * Perform apply_topups as a task, tweak org update form - * Updates to credit caches to consider expiration - * Tweak credit expiration email - -v3.0.259 ----------- - * Improve performance and restartability of run.path backfill migration - * Update to latest smartmin - * Use run.results for run results page - -v3.0.258 ----------- - * Set brand domain on channel creations, use for callbacks - -v3.0.257 ----------- - * Migration to populate run paths (timeconsuming, may want to fake aand run manually) - * Ensure actions have UUIDs in single message and join-group flows - * Flow migration command shouldn't blow up if a single flow fails - -v3.0.255 ----------- - * Fix Twilio to redirect to twilio claim page after connecting Twilio - * Add FlowRun.path and start populating it for new flow steps - * Removes no longer used Msg.has_template_error field - -v3.0.254 ----------- - * Use get_host() when calculating signature for voice callbacks - -v3.0.253 ----------- - * use get_host() when validating IVR requests - -v3.0.252 ----------- - * Better Twilio channel claiming - -v3.0.250 ----------- - * Tweaks to recommended channels display - -v3.0.246 ----------- - * Update smartmin to version 1.11.4 - * Dynamic channels: Chikka, Twilio, Twilio Messaging Service and TwiML Rest API - -v3.0.245 ----------- - * Tweaks to the great FlowRun results migration for better logging and for parallel migrations - * Fixes us showing inactive orgs in nav bar and choose page - * Ignore requests missing text for incoming message from Infobip - -v3.0.244 ----------- - * Add exit_uuid to all flow action_sets (needed for goflow migrations) - -v3.0.243 ----------- - * Add index to FlowPathRecentMessage - * Flows API endpoint should filter out campaign message flow type - * Add archived field to campaings API endpoint - * Fix to correctly substitute context brand variable in dynamic channel blurb - -v3.0.242 ----------- - * Data migration to populate results on FlowRun (timeconsuming, may want to fake and run manually) - -v3.0.239 ----------- - * Migration to increase size of category count - -v3.0.238 ----------- - * Increase character limits on category counts - -v3.0.237 ----------- - * Fix Nexmo channel link - * Add results field to FlowRun and start populating - * Add FlowCategoryCount model for aggregating flow results - * Remove duplicate USSD channels section - -v3.0.234 ----------- - * Remove single message flows when events are deleted - -v3.0.233 ----------- - * Remove field dependencies on flow release, cleanup migration - * Update to latest 
Django 1.11.6 - -v3.0.232 ----------- - * Mage handler shouldn't be accessible using example token in settings_common - * Make Msg.has_template_error nullable and stop using it - -v3.0.231 ----------- - * Add claim page for dmark for more prettiness - * Add management command to migrate flows forward - * Add flow migration for partially localized single message flows - * Recalculate topups more often - * Add dmark channel (only can send and receive through courier) - * Merge pull request #1522 from nyaruka/headers - * Replace TEMBA_HEADERS with http_headers() - * Improve mock server used by tests so it can mock specifc url with specific responses - * Add method to get active channels of a particular channel type category - * Replace remaining occurrences of assertEquals - * Fix the way to check USSD support - * Dynamic channels: Vumi and Vumi USSD - -v3.0.230 ----------- - * Deal with malformed group format as part of group updates - * Allow installs to configure how many fields they want to keep in @extra - * Fix Nexmo icon - * Add logs for incoming requests for InfoBip - * Do both Python 2 and 3 linting in a single build job - -v3.0.229 ----------- - * Do not set external ID for InfoBip we have send them our ID - * Fix channel address comparison to be insensitive to + - * Use status groupId to check from the InfoBip response to know if the request was erroneous - -v3.0.228 ----------- - * Add id to reserved field list - -v3.0.227 ----------- - * Update Infobip channel type to use the latest JSON API - * Migrate flows forward to have dependencies - -v3.0.226 ----------- - * Fix issue with dates in the contact field extractor - * Allow org admin to remove invites - -v3.0.225 ----------- - * Optimize how we check for unsent messages on channels - * Ensure all actions have a UUID in new flow spec version 10.1 - * Fixes viber URN validation: can be up to 24 chars - * Dynamic channels: Zenvia, YO - * Add support for minor flow migrations - -v3.0.224 ----------- - * Remove duplicate excellent includes (only keep compressed version) - -v3.0.222 ----------- - * Only show errors in UI when org level limits of groups etc are exceeded - * Improve error messages when org reaches limit of groups etc - -v3.0.221 ----------- - * Add indexes for retying webhook events - -v3.0.220 ----------- - * Remove no longer used Msg.priority (requires latest Mage) - -v3.0.219 ----------- - * Create channel event only for active channels - * Limit SMS Central channel type to the Kathmandu timezone - * Create fields from expressions on import - * Flow dependencies for fields, groups, and flows - * Dynamic channels: Start - * Dynamic channels: SMS Central - -v3.0.218 ----------- - * Delete simulation messages in batch of 25 to use the response_to index - * Fix Kannel channel type icon - * @step.contact and @contact should both be the run contact - * Migration to set value_type on all RuleSets - -v3.0.217 ----------- - * Add page titles for common pages - * New index for contact history - * Exit flows in batches so we dont have to grab all runs at once - * Check we can create a new groups before importing contact and show the error message to the user - * Fixes value type guessing on rulesets (we had zero typed as dates) - * Update po files - * Dynamic channels: Shaqodoon - -v3.0.216 ----------- - * Should filter user groups by org before limiting to 250 - * Fixes for slow contact history - * Allow updating existing fields via API without checking the count - * Update TWIML IVR protocol check - * Add update form fields 
in dynamic channel types - * Abstract out the channel update view form classes - * Add ivr_protocol field on channel type - * Mock constants to not create a lot of objects in test DB - * Limit the contact fields max per org to 200 to below the max form post fields allowed - * Limit number of contact groups creation on org to 250 - * Limit number of contact fields creation on org to 250 - * Dynamic channels: Red Rabbit, Plivo Nexmo - -v3.0.212 ----------- - * Make Msg.priority nullable so courier doesn't have to write to it - * Calculate TPS cost for messages and add them to courier queues - * Fix truncate cases in SQL triggers - * Fix migration to recreate trigger on msgs table - * Dynamic channels: Mblox - -v3.0.211 ----------- - * Properly create event fires for campaign events updated through api - * Strip matched string in not empty test - * Dynamic channels: Macrokiosk - -v3.0.210 ----------- - * Make message priority be based on responded state of flow runs - * Support templatized urls in media - * Add UI for URL Attachments - * Prevent creation of groups and labels at flow run time - * Dynamic channels: M3Tech, Kannel, Junebug and Junebug USSD - -v3.0.209 ----------- - * Add a way to specify the prefixes short codes should be matching - * Include both high_priority and priority in courier JSON - * Fix TwiML migration - * Fix JSON response when searching Plivo numbers - -v3.0.208 ----------- - * Msg.bulk_priority -> Msg.high_priority - * Change for currencies for numeric rule - * Dynamic channels for Jasmin, Infobip, and Hub9 - -v3.0.207 ----------- - * Fix Twiml config JSON keys - * Unarchiving a campaign should unarchive all its flows - -v3.0.206 ----------- - * Fix broken Twilio Messaging Service status callback URL - * Only update dynamic groups from set_field if value has changed - * Optimize how we lookup contacts for some API endpoints - * More dynamic channels - -v3.0.205 ----------- - * add way to show recommended channel on claim page for dynamic channels - * change Org.get_recommended_channel to return the channel type instead of a random string - -v3.0.204 ----------- - * separate create and drop index operations in migration - -v3.0.203 ----------- - * create new compound index on channel id and external id, remove old external id index - * consistent header for contact uuid in exports and imports - * unstop contacts in handle message for new messages - * populate @extra even on webhook failures - * fix flow simulator with chatbase connected - * use ContactQL for name of contact querying grammar - * dynamic channels: Clickatell - * fix contact searching where text includes + or / chars - * replace Ply with ANTLR for contact searching (WIP) - -v3.0.201 ----------- - * Make clean string method replace non characteres correctly - -v3.0.200 ----------- - * Support Telegram /start command to trigger new conversation trigger - -v3.0.199 ----------- - * Use correct Twilio callback URL, status is for voice, keep as handler - -v3.0.198 ----------- - * Add /c/kn/uuid-uuid-uuid/receive style endpoints for all channel types - * Delete webhook events in batches - * Dynamic channels: Blackmyna - -v3.0.197 ----------- - * update triggers so that updates in migration work - -v3.0.196 ----------- - * make sure new uuids are honored in in_group tests - * removes giant join through run/flow to figure out flow steps during export - * create contacts from start flow action with ambiguous country - * add tasks for handling of channel events, update handlers to use ChannelEvent.handle - * add 
org level dashboard for multi-org organizations - -v3.0.195 ----------- - * Tweaks to allow message handling straight from courier - -v3.0.193 ----------- - * Add flow session model and start creating instances for IVR and USSD channel sessions - -v3.0.192 ----------- - * Allow empty contact names for surveyor submissions but make them null - * Honor admin org brand in get_user_orgs - * Fix external channel bulk sender URL - * Send broadcast in the same task as it is created in and JS utility method to format number - * Try the variable as a contact uuid and use its contact when building recipients - * Fix org lookup, use the same code path for sending a broadcast - * Fix broadcast to flow node to consider all current contacts on the the step - -v3.0.191 ----------- - * Update test_db to generate deterministic UUIDs which are also valid UUID4 - -v3.0.190 ----------- - * Turn down default courier TPS to 10/s - -v3.0.189 ----------- - * Make sure msg time never wraps in the inbox - -v3.0.188 ----------- - * Use a real but mockable HTTP server to test flows that hit external URLs instead of mocking the requests - * Add infobip as dynamic channel type and Update it to use the latest Infobip API - * Add support for Courier message sending - -v3.0.183 ----------- - * Use twitter icon for twitter id urns - -v3.0.182 ----------- - * Tweak test_start_flow_action to test parent run states only after child runs have completed - * Stop contacts when they have only an invalid twitter screen name - * Change to max USSD session length - -v3.0.181 ----------- - * Ignore case when looking up twitter screen names - -v3.0.180 ----------- - * Switch to using twitterid scheme for Twitter messages - * Should be shipped before Mage v0.1.84 - -v3.0.179 ----------- - * Allow editing of start conversation triggers - -v3.0.178 ----------- - * Remove urn field, urn compound index, remove last uses of urn field - -v3.0.177 ----------- - * remove all uses of urn (except when writing) - * create display index, backfill identity - * Allow users to specify extra URNs columns to include on the flow results export - -v3.0.176 ----------- - * Add display and identity fields to ContactURN - * Add schemes field to allow channels to support more than one scheme - -v3.0.175 ----------- - * Fix incorrect lambda use so message sending works - -v3.0.174 ----------- - * Make ContactField.uuid unique and non-null - -v3.0.173 ----------- - * Add migration to populate ContactField.uuid - -v3.0.172 ----------- - * Only try to delete Twilio app when channel config contains 'application_sid' - * Surveyor submissions should try rematching the rules if the same ruleset got updated by the user and old rules were removed - * Add uuid field to ContactField - * Convert more channel types to dynamic types - -v3.0.171 ----------- - * Fixes for Twitter Activity channels - * Add stop contact command to mage handler - * Convert Firebase Cloud Messaging to a dynamic channel type - * Convert Viber Public to a dynamic channel type - * Change to the correct way for dynamic channel - * Convert LINE to a dynamic channel type - * Better message in SMS alert email - -v3.0.170 ----------- - * Hide SMTP config password and do not change the set password if blank is submitted - * Validate the length of message campaigns for better user feedback - * Make FlowRun.uuid unique and non-null (advise faking this and building index concurrently) - -v3.0.169 ----------- - * Migration to populate FlowRun.uuid. Advise faking this and running manually. 
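Several entries in these old notes advise faking a slow data migration and then running its backfill manually. A minimal sketch of that pattern, for readers unfamiliar with it; the app label `flows` and migration number `0100` are hypothetical placeholders, not the real migration names:

```
# Minimal sketch of the "fake it, then run it manually" pattern recommended in
# several entries here. The app label and migration number are hypothetical;
# use showmigrations to find the actual data migration in a given deployment.
from django.core.management import call_command

# List migrations and their applied state for the app.
call_command("showmigrations", "flows")

# Record the slow data migration as applied without executing it.
call_command("migrate", "flows", "0100", fake=True)

# The backfill itself can then be run in batches from `manage.py shell` or as
# raw SQL during a maintenance window.
```

Faking only updates Django's migration bookkeeping table, which is why these entries pair it with running the data backfill out of band.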
-
-v3.0.167
-----------
- * Fix inclusion of attachment urls in webhook payloads and add tests
- * Install lxml to improve performance of large Excel exports
- * Add proper deactivation of Telegram channels
- * Converted Facebook and Telegram to dynamic channel types
- * Add nullable uuid field to FlowRun
- * Make sure we consider all URN schemes we can send to when looking up whether we have a send channel
- * Split Twitter and Twitter Beta into separate channel types
- * Remove support for old-style Twilio endpoints
-
-v3.0.166
-----------
- * Release channels before Twilio/Nexmo configs are cleared
- * Expose flow start UUID on runs from the runs endpoint
-
-v3.0.165
-----------
- * Migration to populate FlowStart.uuid on existing objects (advise faking and running manually)
-
-v3.0.163
-----------
- * Add uuid field to FlowStart
- * Migration to convert TwiML apps
-
-v3.0.160
-----------
- * Add support for Twitter channels using new beta Activity API
-
-v3.0.159
-----------
- * Clean incoming message text to remove invalid chars
-
-v3.0.158
-----------
- * Add more exception currencies for pycountry
- * Support channel-specific Twilio endpoints
-
-v3.0.156
-----------
- * Clean up pip-requires and reset pip-freeze
-
-v3.0.155
-----------
- * Reduce the rate limit for SMS Central to 1 request per second
- * Display Jiochat on channel claim page
- * Fix date pickers on modal forms
- * Update channels to generate messages with multiple attachments
-
-v3.0.154
-----------
- * Rate limit sending through SMS Central to 10 messages per second
- * Fix some more uses of Context objects no longer supported in Django 1.11
- * Fix channel log list request time display
- * Add @step.text and @step.attachments to message context
-
-v3.0.153
-----------
- * Jiochat channels
- * Django 1.11
-
-v3.0.151
-----------
- * Convert all squashable and prunable models to use big primary keys
-
-v3.0.150
-----------
- * Drop database-level length restrictions on msg and values
- * Add sender ID config for Macrokiosk channels
- * Expose org credit information on API org endpoint
- * Add contact_uuid parameter to update FCM user
- * Add configurable webhook header fields
-
-v3.0.148
-----------
- * Fix simulator with attachments
- * Switch to using new recent messages model
-
-v3.0.147
-----------
- * Migration to populate FlowPathRecentMessage
- * Clip messages to 640 chars for recent messages table
-
-v3.0.145
-----------
- * Change Macrokiosk time format to not have space
- * Better error message for external channel handler for wrong time format
- * Add new model for tracking recent messages on flow path segments
-
-v3.0.144
-----------
- * Remove Msg.media field that was replaced by Msg.attachments
- * Change default ivr timeouts to 2m
- * Fix the content-type for Twilio call response
-
-v3.0.143
-----------
- * Update contact read page and inbox views to show multiple message attachments
- * Fix use of videojs to provide consistent video playback across browsers
- * API should return error message if user provides something unparseable for a non-serializer param
-
-v3.0.142
-----------
- * Fix handling of old msg structs with no attachments attribute
- * Tweak in create_outgoing to prevent possible NPEs in flow execution
- * Switch to using Msg.attachments instead of Msg.media
- * Replace index on Value.string_value with one that is limited to first 32 chars
-
-v3.0.139
-----------
- * Fix Macrokiosk JSON responses
-
-v3.0.138
-----------
- * Migration to populate attachments field on old messages
-
-v3.0.137
-----------
- * Don't assume event fires still exist in process_fire_events
- * Add new Msg.attachments field to hold multiple attachments on an incoming message
-
-v3.0.136
-----------
- * Fix scheduled broadcast text display
-
-v3.0.135
-----------
- * Make 'only' keyword triggers ignore punctuation
- * Make check_campaigns_task lock on the event fires that it will queue
- * Break up flow event fires into sub-batches of 500
- * Ignore and ack incoming messages from Android relayer that have no number
-
-v3.0.134
-----------
- * Add match_type option to triggers so users can create triggers which only match when the message contains only the keyword
- * Allow Africa's Talking to retry sending messages
- * Allow search on the triggers pages
- * Clear results for analytics when user removes a flow run
-
-v3.0.133
-----------
- * Make Msg.get_sync_commands more efficient
- * Fix open range airtime transfers
- * Fix multiple Android channels sync
- * Fix parsing of macrokiosk channel time format
- * Ensure that our select2 boxes show "Add new" option even if there is a partial match with an existing item
- * Switch to new translatable fields and remove old Broadcast fields
- * Add Firebase Cloud messaging support for Android channels
-
-v3.0.132
-----------
- * Migration to populate new translatable fields on old broadcasts. This migration is slow on a large database so it's
-   recommended that large deployments fake it and run it manually.
-
-v3.0.128
-----------
- * Add new translatable fields to Broadcast and ensure they're populated for new stuff
-
-v3.0.127
-----------
- * Fix autocomplete for items containing digits or other items
- * Make autocomplete dropdown disappear when user clicks in input box
- * Replace usages of "SMS" with "message" in editor
- * Allow same subflow to be called without pause in between
-
-v3.0.126
-----------
- * Fix exporting messages by a label folder
- * Improve performance of org export page for large orgs
- * Make it easier to enable/disable debug toolbar
- * Increase channel logging for requests and responses
- * Change contact api v1 to insert nonexistent fields
- * Graceful termination of USSD sessions
-
-v3.0.125
-----------
- * Don't show deleted flows on list page
- * Convert timestamps sent by MacroKiosk from local Kuala Lumpur time
-
-v3.0.124
-----------
- * Move initial IVR expiration check to status update on the call
- * Hide request time in channel log if unset
- * Check the existence of broadcast recipients before adding
- * Voice flows import should never allow expirations longer than 15 mins
- * Fix parse location to correctly use the tokenized text if the location was matched for the entire text
- * Use updates instead of full Channel saves() on relayer syncs, only update when there are changes
-
-v3.0.123
-----------
- * Use flow starts for triggers that operate on groups
- * Handle throttling errors from Nexmo when using API to add new numbers
- * Convert campaign event messages to HSTORE fields
-
-v3.0.121
-----------
- * Add MACROKIOSK channel type
- * Show media for MMS in simulator
-
-v3.0.120
-----------
- * Fix send all bug where we append list of messages to another list of messages
- * Flows endpoint should allow filtering by modified_on
-
-v3.0.119
-----------
- * More vertical form styling tweaks
-
-v3.0.118
-----------
- * Add flow link on subflow rulesets in flows
-
-v3.0.117
-----------
- * Fix styling on campaign event modal
-
-v3.0.116
-----------
- * Update to latest Raven
- * Make default form vertical, remove horizontal to vertical css overrides
- * Add flow run search and deletion
- * Hang up calls on channel release
-
-v3.0.115
-----------
- * Allow message exports by label, system label or all messages
- * Fix for double stacked subflows with immediate exits
-
-v3.0.112
-----------
- * Archiving a flow should interrupt all the current runs
-
-v3.0.111
-----------
- * Display webhook results on contact history
- * Clean up template tags used on contact history
- * Allow broadcasts to be sent to all urns belonging to the specified contacts
-
-v3.0.109
-----------
- * Data migration to populate broadcast send_all field
-
-v3.0.108
-----------
- * Add webhook events trim task with configurable retain times for success and error logs
-
-v3.0.107
-----------
- * Add send_all broadcast field
-
-v3.0.106
-----------
- * Remove non_atomic_gets and display message at /api/v1/ to explain API v1 has been replaced
- * Add squashable model for label counts
- * Split system label functionality into SystemLabel and SystemLabelCount
-
-v3.0.105
-----------
- * Link subflow starts in actions
- * Allow wait to wait in flows with warning
-
-v3.0.104
-----------
- * Add new has email test, contains phrase test and contains only phrase test
-
-v3.0.103
-----------
- * Migration to populate FlowNodeCount shouldn't include test contacts
-
-v3.0.102
-----------
- * Add migration to populate FlowNodeCount
-
-v3.0.101
-----------
- * Migration to clear no-longer-used flow stats redis keys
- * Replace remaining cache-based flow stats code with trigger based FlowNodeCount
-
-v3.0.100
-----------
- * Fix intermittently failing Twilio test
- * make sure calls have expiration on initiation
- * Update to latest smartmin
- * Add redirection for v1 endpoints
- * Fix webhook docs
- * Fix MsgCreateSerializer not using specified channel
- * Test coverage
- * Fix test coverage issues caused by removing API v1 tests
- * Ensure surveyor users still have access to the API v2 endpoints they need
- * Remove djangorestframework-xml
- * Restrict API v1 access to surveyor users
- * Block all API v2 writes for suspended orgs
- * Remove all parts of API v1 not used by Surveyor
-
-v3.0.99
-----------
- * Prioritize msg handling over timeouts and event fires
- * Remove hamlcompress command as deployments should use regular compress these days
- * Fix not correctly refreshing dynamic groups when a URN is removed
- * Allow searching for contacts *with any* value for a given field
-
-v3.0.98
-----------
- * Fix sidebar nav LESS so that level2 lists don't have fixed height and separate scrolling
- * Unstop a contact when we get an explicit user interaction such as follow
-
-v3.0.96
-----------
- * Fix possible race condition between receiving and handling messages
- * Do away with scheme for USSD, will always be TEL
- * Make sure events are handled properly for USSD
- * Do not specify to & from when using reply_to
- * Update JunebugForm for editing Junebug Channel + config fields
-
-v3.0.95
-----------
- * Log request time on channel log success
-
-v3.0.94
-----------
- * Fix test, fix template tags
-
-v3.0.93
-----------
- * Change request times to be in ms instead of seconds
-
-v3.0.92
-----------
- * Block on handling incoming msgs so we don't process them forever
- * Include Viber channels in new conversation trigger form channel choices
-
-v3.0.90
-----------
- * Don't use cache+calculations for flow segment counts - these are pre-calculated in FlowPathCount
- * Do not include active contacts in flows unless user overrides it
- * Clean up middleware imports and add tests
- * Feedback to user when simulating a USSD channel without a USSD channel connected
-
-v3.0.89
-----------
- * Expand base64 charset, fix decode validity heuristic
-
-v3.0.88
-----------
- * Deal with Twilio arbitrarily sending messages as base64
- * Allow configuration of max text size via settings
-
-v3.0.87
-----------
- * Set higher priority when sending responses through Kannel
-
-v3.0.86
-----------
- * Do not add stopped contacts to groups when importing
- * Fix an entire flow start batch failing if one run throws an exception
- * Limit image file size to less than 500kB
- * Send Facebook message attachments in a different request than the text message
- * Include skuid for open range TransferTo accounts
-
-v3.0.85
-----------
- * Fix exception when handling Viber msg with no text
- * Migration to remove no longer used ContactGroup.count
- * Fix search queries like 'foo bar' where there is more than one condition on name/URN
- * Add indexes for Contact.name and ContactURN.path
- * Replace current omnibox search function with faster and simpler top-25-of-each-type approach
-
-v3.0.84
-----------
- * Fix Line, FCM icons, add Junebug icon
-
-v3.0.83
-----------
- * Render missing field and URN values as "--" rather than "None" on Contact list page
-
-v3.0.82
-----------
- * Add ROLE_USSD
- * Add Junebug USSD Channel
- * Fix Vumi USSD to use USSD Role
-
-v3.0.81
-----------
- * Archive triggers that do not have a contact to send to
- * Disable sending of messages for blocked and stopped contacts
-
-v3.0.80
-----------
- * Add support for outbound media on reply messages for Twilio MMS (US, CA), Telegram, and Facebook
- * Do not throw when viber sends us a message missing the media
- * Optimizations around Contact searching
- * Send flow UUID with webhook flow events
-
-v3.0.78
-----------
- * Allow configuration of max message length to split on for External channels
-
-v3.0.77
-----------
- * Use brand key for evaluation instead of host when determining brand
- * Add red rabbit type (hidden since MT only)
- * Fix flow results exports for broadcast only flows
-
-v3.0.76
-----------
- * Log Nexmo media responses without including entire body
-
-v3.0.75
-----------
- * Don't encode to utf8 for XML and JSON since they expect unicode
- * Optimize contact searching when used to determine single contact's membership
- * Use flow system user when migrating flows, avoid list page reorder after migrations
-
-v3.0.74
-----------
- * reduce number of lookups to DB
-
-v3.0.73
-----------
- * Add test case for search URL against empty field value
- * Fix sending vumi messages initiated from RapidPro without response to
-
-v3.0.72
-----------
- * Improvements to external channels to allow configuration against JSON and XML endpoints
- * Exclude test contacts from flow results
- * Update to latest smartmin to fix empty string searching
-
-v3.0.70
-----------
- * Allow USSD flows to start someone else in a flow
- * Include reply to external_id for Vumi channel
-
-v3.0.69
-----------
- * Add ID column to result exports for anon orgs
- * Deactivate runs when releasing flows
- * Fix urn display for call log
- * Increased send and receive channel logging for Nexmo, Twilio, Twitter and Telegram
- * Allow payments through Bitcoin
- * Include TransferTo account currency when asking phone info from TransferTo
- * Don't create inbound messages for gather timeouts, letting calls expire
- * Don't show channel log for inactive channels on contact history
- * Upgrade to latest smartmin which changes created_on/modified_on fields on SmartModels to be overridable
- * Uniform call and message logs
-
-v3.0.64
-----------
- * Add ID column to anonymous org contact exports, also add @contact.id field in message context
- * Fix counts for channel log elements
- * Only have one link on channel page for sending log
- * Attempt to determine file types for msg attachments using libmagic
- * Deactivate runs on hangups, keep IVR runs open on exit
- * Add log for nexmo media download
- * Add new perf_test command to run performance tests on database generated with make_test_db
-
-v3.0.62
-----------
- * Fix preferred channels for non-msg channels
-
-v3.0.61
-----------
- * Make migrations to populate new export task fields non-atomic
- * Add indexes for admin boundaries and aliases
- * Nexmo: make sure calls are ended on hangup, log hangups and media
- * Fix inbound calls on Nexmo to use conversation_uuid
- * Style tweaks for zapier widget
- * Use shorter timeout for IVR
- * Issue hangups on expiration during IVR runs
- * Catch all exceptions and log them when initiating call
- * Fix update status for Nexmo calls
-
-v3.0.48
-----------
- * Add channel session log page
- * Use brand variable for zaps to show
- * Additional logging for nexmo
- * Increase non-overlap on timeout queueing, never double queue single timeout
- * Fix broken timeout handling when there is a race
- * Make field_keys a required parameter
- * Speed up the contact import by handling contact update at once after all the fields are set
-
-v3.0.47
-----------
- * Add channel log for Nexmo call initiation
- * Fix import-geojson management command
-
-v3.0.46
-----------
- * Fix Contact.search so it doesn't evaluate the base_query
- * Enable searching in groups and blocked/stopped contacts
-
-v3.0.45
-----------
- * Fix absolute positioning for account creation form
- * Add Line channel icon in fonts
- * Add data migrations to update org config to connect to Nexmo
-
-v3.0.43
-----------
- * Add Malawi as a country for Africa's Talking
-
-v3.0.42
-----------
- * Widen pages to browser width so more can fit
- * Fix the display of URNs on contact list page
- * Fix searching of Nexmo number on connected accounts
-
-v3.0.41
-----------
- * Fix channel countries being duplicated for airtime configuration
- * Add make_sql command to generate SQL files for an app, reorganize current SQL reference files
- * Added SquashableModel and use it for all squashable count classes
-
-v3.0.40
-----------
- * Add support for Nexmo IVR
- * Log IVR interactions in Channel Log
-
-v3.0.37
-----------
- * Fix to make label of open-ended response be All Responses even if there is timeout on the ruleset
- * Data migration to rename category for old Values collected with timeouts
-
-v3.0.36
-----------
- * Add 256 keys to @extra, also enforce ordering so it is predictable which are included
- * Make fetching flow run stats more efficient and expose number of active runs on flow run endpoint
- * Migration to populate session on msg and ended_on where it is missing
-
-v3.0.35
-----------
- * Offline context per brand
-
-v3.0.34
-----------
- * Add Junebug channel type
- * Better base styling for dev project
- * Pass charset parameter to Kannel when sending unicode
- * Zero out minutes, seconds, ms for campaign events with set delivery hours
- * Add other URN types to contact context, return '' if missing, '*' mask for anon orgs
- * Make sure Campaigns export base_language for simple message events, honor on import
-
-v3.0.33
-----------
- * Change ansible command run on vagrant up from syncdb to migrate
- * Remove no longer needed django-modeltranslation
- * Keep up to 256 extra keys from webhooks instead of 128
- * Add documentation of API rate limiting
-
-v3.0.32
-----------
- * Make styling variables uniform across branding
- * Make brand styling optional
-
-v3.0.28
-----------
- * Add support for subflows over IVR
-
-v3.0.27
-----------
- * Fix searching for Twilio numbers, add unit tests
- * Fix API v1 run serialization when step messages are purged
-
-v3.0.26
-----------
- * Adds more substitutions from accented characters to gsm7 plain characters
-
-v3.0.25
-----------
- * Populate ended_on for ivr calls
- * Add session foreign key to Msg model
-
-v3.0.24
-----------
- * Fix bug in starting calls from sessions
-
-v3.0.23
-----------
- * Remove flow from ChannelSession, sessions can span many runs/flows
- * Remove superfluous channelsession.parent
-
-v3.0.22
-----------
- * Migration to update existing twiml apps with a status_callback, remove api/v1 references
-
-v3.0.21
-----------
- * Various tweaks to wording and presentation around custom SMTP email config
-
-v3.0.20
-----------
- * Allow orgs to set their own SMTP server for outgoing emails
- * Return better error message when To number not passed to Twilio handler
- * Exclude Flow webhook events from retries (we try once and forget)
- * Don't pass channel in webhook events if we don't know it
- * Use JsonResponse and response.json() consistently
- * Replace json.loads(response.content) with response.json() which properly decodes on Python 3
-
-v3.0.19
-----------
- * Improve performance of contact searches by location by fetching locations in separate query
-
-v3.0.18
-----------
- * Update pyparsing to 2.1.10
- * Update to new django-hamlpy
- * Display flow run exits on the contact timeline
- * Fix Travis settings file for Python 3
- * Fix more Python 3 syntax issues
- * Fix RecentMessages no longer supporting requests with multiple rules, and add tests for that
- * Use print as function rather than statement for future Python 3 compatibility
- * Do not populate contact name for anon orgs from Viber
- * Add is_squashed to FlowPathCount and FlowRunCount
- * Updates to using boto3, if using AWS for storing imports or exports you'll need to change your settings file: `DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'` (see the settings sketch below)
-
-v3.0.14
-----------
- * Allow for the creation of Facebook referral triggers (opt-in on FB)
- * Allow for whitelisting of domains for Facebook channels
-
-v3.0.13
-----------
- * New contact field editing UI with Intercooler modals
-
-v3.0.9
-----------
- * Update RecentMessages view to use new recent messages model
- * Remove now unused indexes on FlowStep
-
-v3.0.8
-----------
- * Adds data migration to populate FlowPathRecentStep from existing Flow Steps
-
-v3.0.7
-----------
- * Introduce new model, FlowPathRecentStep, that tracks recent steps from one flow node to another. This will replace the rather expensive index used to show recent flow activity on a flow path.
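The v3.0.18 entry above calls out a required settings change when moving to boto3. A minimal sketch of what that looks like, assuming django-storages and boto3 are installed; the bucket name is a hypothetical placeholder and credential handling is deployment-specific:

```
# settings.py -- sketch of switching file storage to the boto3 backend per the
# v3.0.18 entry above. Assumes django-storages and boto3 are installed; the
# bucket name is a hypothetical placeholder.
DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
AWS_STORAGE_BUCKET_NAME = "your-bucket-name"  # hypothetical
# Credentials are typically supplied via the environment or an IAM role rather
# than hardcoded in settings.
```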
-
-v3.0.10
-----------
- * Log any exceptions encountered in Celery tasks to Raven
- * Tell user to get pages_messaging_subscriptions permission for their FB app
-
-v3.0.6
-----------
- * Replace unicode non-breaking spaces with a normal space for GSM7 encoding (Kannel only)
- * Add migrations for custom indexes (existing installs before v3 should fake these)
-
-v3.0.5
-----------
- * fix styling on loader ball animation
-
-v3.0.4
-----------
- * Fix issue causing flow run table on flow dashboard to be very slow if a flow contained many responses
-
-v3.0.3
-----------
- * Refactor JSON responses to use native Django JSONResponse
- * Don't use proxy for Dart Media and Hub9, expose IPs to whitelist
-
-v3.0.2
-----------
- * Fixes DartMedia channel for short codes
-
-v3.0.1
-----------
- * Remove django-celery as it is unneeded, also stop saving Celery tombstones as we now store
-   all task state (ContactImport for example) directly in models
-
-v3.0.0
-----------
- * IMPORTANT: This release resets all Temba migrations. You need to run the latest migrations
-   from a version preceding this one, then fake all temba migrations when deploying:
 ```
 % python manage.py migrate csv_imports
 % python manage.py migrate airtime --fake
 % python manage.py migrate api --fake
-% python manage.py migrate campaigns --fake
+% python manage.py migrate campaigns --fake
 % python manage.py migrate channels --fake
 % python manage.py migrate contacts --fake
 % python manage.py migrate flows --fake
@@ -10180,9 +10799,10 @@ v3.0.0
 % python manage.py migrate values --fake
 % python manage.py migrate
 ```
- * Django 1.10
- * Guardian 1.4.6
- * MPTT 0.8.7
- * Extensions 1.7.5
- * Boto 2.45.0
- * Django Storages 1.5.1
+
+- Django 1.10
+- Guardian 1.4.6
+- MPTT 0.8.7
+- Extensions 1.7.5
+- Boto 2.45.0
+- Django Storages 1.5.1
diff --git a/README.md b/README.md
index 8eb04bd6689..731eb091894 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,21 @@
-# TextIt
+# RapidPro
 
 [![Build Status](https://github.com/nyaruka/rapidpro/workflows/CI/badge.svg)](https://github.com/nyaruka/rapidpro/actions?query=workflow%3ACI)
 [![codecov](https://codecov.io/gh/nyaruka/rapidpro/branch/main/graph/badge.svg)](https://codecov.io/gh/nyaruka/rapidpro)
 
-TextIt is a hosted service for visually building interactive messaging applications. You can signup at
+RapidPro is a cloud-based SaaS developed by [TextIt](https://textit.com) for visually building interactive messaging applications. You can sign up at
 [textit.com](https://textit.com) or host it yourself.
 
-### Stable Versions
+## Technology Stack
 
-The set of versions that make up the latest stable release are:
+ * [PostgreSQL](https://www.postgresql.org)
+ * [Redis](https://redis.io)
+ * [Elasticsearch](https://www.elastic.co/elasticsearch)
+ * [S3](https://aws.amazon.com/s3/)
+ * [DynamoDB](https://aws.amazon.com/dynamodb/)
+ * [Cloudwatch](https://aws.amazon.com/cloudwatch/)
 
- * [RapidPro 9.2.5](https://github.com/nyaruka/rapidpro/releases/tag/v9.2.5)
- * [Mailroom 9.2.2](https://github.com/nyaruka/mailroom/releases/tag/v9.2.2)
- * [Courier 9.2.1](https://github.com/nyaruka/courier/releases/tag/v9.2.1)
- * [Indexer 9.2.0](https://github.com/nyaruka/rp-indexer/releases/tag/v9.2.0)
- * [Archiver 9.2.0](https://github.com/nyaruka/rp-archiver/releases/tag/v9.2.0)
-
-### Versioning
+## Versioning
 
 Major releases are made every 6 months on a set schedule. We target January as a major release (e.g. `9.0.0`), then
 July as the stable dot release (e.g. `9.2.0`). Unstable releases (i.e.
*development* versions) have odd minor versions @@ -29,3 +28,13 @@ for the latest stable release you are on, then every stable release afterwards. Generally we only do bug fixes (patch releases) on stable releases for the first two weeks after we put out that release. After that you either have to wait for the next stable release or take your chances with an unstable release. + +### Stable Versions + +The set of versions that make up the latest stable release are: + + * [RapidPro 9.2.5](https://github.com/nyaruka/rapidpro/releases/tag/v9.2.5) + * [Mailroom 9.2.2](https://github.com/nyaruka/mailroom/releases/tag/v9.2.2) + * [Courier 9.2.1](https://github.com/nyaruka/courier/releases/tag/v9.2.1) + * [Indexer 9.2.0](https://github.com/nyaruka/rp-indexer/releases/tag/v9.2.0) + * [Archiver 9.2.0](https://github.com/nyaruka/rp-archiver/releases/tag/v9.2.0) diff --git a/code_check.py b/code_check.py index 055a5aa6346..cbdb44a8ac6 100755 --- a/code_check.py +++ b/code_check.py @@ -36,10 +36,10 @@ def status(line): cmd("python manage.py makemigrations --check") status("Running isort") - cmd("isort temba") + cmd("isort --check temba") status("Running black") - cmd("black temba") + cmd("black --check temba") status("Running ruff") cmd("ruff check temba") diff --git a/media/test_flows/add_label.json b/media/test_flows/add_label.json deleted file mode 100644 index 9a41dc555db..00000000000 --- a/media/test_flows/add_label.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "version": "11.10", - "site": "https://textit.in", - "flows": [ - { - "entry": "22505d46-43c5-42ba-975e-725c01ea440f", - "action_sets": [ - { - "uuid": "22505d46-43c5-42ba-975e-725c01ea440f", - "x": 100, - "y": 0, - "destination": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", - "actions": [ - { - "type": "reply", - "uuid": "27dfd8ac-55c5-49c9-88e3-3fb84a9894ff", - "msg": { - "eng": "Hey" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "6e2b09ec-3cc0-4ee6-ae7b-b76bad3ab6d3" - }, - { - "uuid": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", - "x": 95, - "y": 101, - "destination": "78c20ee4-94bd-45e6-8510-8e602568fb6e", - "actions": [ - { - "type": "add_label", - "uuid": "bc82c11d-7654-44e4-966c-fb39e2851df0", - "labels": [ - { - "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", - "name": "Hello" - } - ] - } - ], - "exit_uuid": "84bf44a1-13fd-44cb-8014-d6feb06e010f" - }, - { - "uuid": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", - "x": 146, - "y": 358, - "destination": null, - "actions": [ - { - "type": "add_label", - "uuid": "910bf3b5-951f-47a8-93df-11a6eac8bf0f", - "labels": [ - { - "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", - "name": "Hello" - } - ] - } - ], - "exit_uuid": "6d579c28-9f3f-4584-bd2e-74009612fdbb" - } - ], - "rule_sets": [ - { - "uuid": "78c20ee4-94bd-45e6-8510-8e602568fb6e", - "x": 85, - "y": 219, - "label": "Response 1", - "rules": [ - { - "uuid": "33438bbf-49bd-4468-9a74-bbd7e1f58f57", - "category": { - "eng": "All Responses" - }, - "destination": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.10", - "metadata": { - "name": "Add Label", - "saved_on": "2019-02-12T09:23:05.746930Z", - "revision": 7, - "uuid": "e9b5b8ba-43f4-4bc2-a790-811ee1cfe392", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end 
of file diff --git a/media/test_flows/bad_send_action.json b/media/test_flows/bad_send_action.json deleted file mode 100644 index d5b66d3eae2..00000000000 --- a/media/test_flows/bad_send_action.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "base_language": "base", - "action_sets": [ - { - "y": 795, - "x": 705, - "destination": "610d3c9d-7d2c-4aa4-b0eb-a07c823f6964", - "uuid": "0406607c-e711-4cbb-9c69-cbe3ce785dbd", - "actions": [ - { - "uuid": "c7e3dc19-dc4a-45a0-a4f7-7f57720c3ce5", - "contacts": [ - { - "urns": [ - { - "priority": 50, - "path": "+14255551212", - "scheme": "tel" - } - ], - "id": "contact1_id", - "name": "Mark" - }, - { - "urns": [ - { - "priority": 50, - "path": "+12065551212", - "scheme": "tel" - } - ], - "id": "contact2_id", - "name": "Gregg" - } - ], - "variables": [], - "groups": [], - "msg": { - "base": "Hey there, here's a message." - }, - "type": "send" - } - ] - }], - "version": 8, - "flow_type": "F", - "entry": "d41f3f4d-1742-44a0-b5d5-3d814c804832", - "rule_sets": [], - "type": "F", - "metadata": { - "revision": 110, - "expires": 5, - "saved_on": "2015-12-08T17:55:01.020719Z", - "uuid": "9e73669a-e71b-4e2d-ba3e-336ff0e6447b", - "name": "Send Action Test" - } -} \ No newline at end of file diff --git a/media/test_flows/cataclysm_legacy.json b/media/test_flows/cataclysm_legacy.json deleted file mode 100644 index 64500d8117c..00000000000 --- a/media/test_flows/cataclysm_legacy.json +++ /dev/null @@ -1,252 +0,0 @@ -{ - "version": "11.5", - "site": "https://textit.in", - "flows": [ - { - "entry": "c4462613-5936-42cc-a286-82e5f1816793", - "action_sets": [ - { - "uuid": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", - "x": 76, - "y": 99, - "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "actions": [ - { - "type": "add_group", - "uuid": "feb7a33e-bc8b-44d8-9112-bc4e910fe304", - "groups": [ - { - "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", - "name": "Cat Fanciers" - } - ] - }, - { - "type": "add_group", - "uuid": "ca82f0e0-43ca-426c-a77c-93cf297b8e7c", - "groups": [ - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - }, - { - "type": "reply", - "uuid": "d57e9e9f-ada4-4a22-99ef-b8bf3dbcdcae", - "msg": { - "eng": "You are a cat fan! Purrrrr." - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "55f88a1e-73ad-4b6d-9a04-626046bbe5a8" - }, - { - "uuid": "ef389049-d2e3-4343-b91f-13ea2db5f943", - "x": 558, - "y": 94, - "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "actions": [ - { - "type": "del_group", - "uuid": "cea907a8-af81-49af-92e6-f246e52179fe", - "groups": [ - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - }, - { - "type": "reply", - "uuid": "394a328f-f829-43f2-9975-fe2f27c8b786", - "msg": { - "eng": "You are not a cat fan. Hissssss." - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "9ba78afa-948e-44c5-992f-84030f2eaa6b" - }, - { - "uuid": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "x": 319, - "y": 323, - "destination": "35416fea-787d-48c1-b839-76eca089ad2e", - "actions": [ - { - "type": "channel", - "uuid": "78c58574-9f91-4c27-855e-73eacc99c395", - "channel": "bd55bb31-8ed4-4f89-b903-7103aa3762be", - "name": "Telegram: TextItBot" - } - ], - "exit_uuid": "c86638a9-2688-47c9-83ec-7f10ef49de1e" - }, - { - "uuid": "35416fea-787d-48c1-b839-76eca089ad2e", - "x": 319, - "y": 468, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "30d35b8f-f439-482a-91b1-d3b1a4351071", - "msg": { - "eng": "All done." 
- }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "send", - "uuid": "a7b6def8-d315-49bd-82e4-85887f39babe", - "msg": { - "eng": "Hey Cat Fans!" - }, - "contacts": [], - "groups": [ - { - "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", - "name": "Cat Blasts" - } - ], - "variables": [], - "media": {} - }, - { - "type": "trigger-flow", - "uuid": "540965e5-bdfe-4416-b4dd-449220b1c588", - "flow": { - "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", - "name": "Cataclysmic" - }, - "contacts": [], - "groups": [ - { - "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", - "name": "Cat Alerts" - } - ], - "variables": [] - } - ], - "exit_uuid": "f2ef5066-434d-42bc-a5cb-29c59e51432f" - } - ], - "rule_sets": [ - { - "uuid": "c4462613-5936-42cc-a286-82e5f1816793", - "x": 294, - "y": 0, - "label": "Response 1", - "rules": [ - { - "uuid": "17d69564-60c9-4a56-be8b-34e98a2ce14a", - "category": { - "eng": "Cat Facts" - }, - "destination": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", - "destination_type": "A", - "test": { - "type": "in_group", - "test": { - "name": "Cat Facts", - "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9" - } - }, - "label": null - }, - { - "uuid": "a9ec4d0a-2ddd-4a13-a1d2-c63ce9916a04", - "category": { - "eng": "Other" - }, - "destination": "ef389049-d2e3-4343-b91f-13ea2db5f943", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "group", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.5", - "metadata": { - "name": "Cataclysmic", - "saved_on": "2018-10-18T17:03:54.835916Z", - "revision": 49, - "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", - "expires": 10080, - "notes": [] - } - }, - { - "entry": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", - "action_sets": [ - { - "uuid": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "add_group", - "uuid": "11f61fc6-834e-4cbc-88ee-c834279345e6", - "groups": [ - { - "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", - "name": "Cat Alerts" - }, - { - "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9", - "name": "Cat Facts" - }, - { - "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", - "name": "Cat Blasts" - }, - { - "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", - "name": "Cat Fanciers" - }, - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - } - ], - "exit_uuid": "029a7c9d-c935-4ed1-9573-543ded29d954" - } - ], - "rule_sets": [], - "base_language": "eng", - "flow_type": "M", - "version": "11.5", - "metadata": { - "name": "Catastrophe", - "saved_on": "2018-10-18T19:03:07.702388Z", - "revision": 1, - "uuid": "d6dd96b1-d500-4c7a-9f9c-eae3f2a2a7c5", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/child.json b/media/test_flows/child.json deleted file mode 100644 index 3edf4d2111e..00000000000 --- a/media/test_flows/child.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "entry": "f692e793-75a8-45a7-ba8c-4d568bd9d8a8", - "rule_sets": [], - "action_sets": [ - { - "y": 1, - "x": 107, - "destination": null, - "uuid": "f692e793-75a8-45a7-ba8c-4d568bd9d8a8", - "actions": [ - { - "uuid": "c9d2abd9-0966-435a-8663-d716b4393df5", - "value": "@date", - "label": "Campaign Date", - "field": "campaign_date", - "action": "GET", - 
"type": "save" - } - ] - } - ], - "last_saved": "2014-11-20T20:49:08.254645Z", - "metadata": {} - }, - "flow_type": "F", - "name": "Child", - "id": 300 - } - ], - "triggers": [] -} diff --git a/media/test_flows/color_gender_age.json b/media/test_flows/color_gender_age.json deleted file mode 100644 index 93387ef4308..00000000000 --- a/media/test_flows/color_gender_age.json +++ /dev/null @@ -1,215 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "entry": "5dc9d8e1-90c6-4043-bf97-73d35138dc00", - "rule_sets": [ - { - "uuid": "c1911a46-ba8e-48e2-be8f-7c0be30edcdd", - "response_type": "C", - "rules": [ - { - "test": { - "test": "red", - "type": "contains_any" - }, - "category": "Red", - "destination": "f3b33749-c799-47f2-b242-160be2001550", - "uuid": "5a6cd1ec-6d09-4356-9fc0-7b7b71739add" - }, - { - "test": { - "test": "blue", - "type": "contains_any" - }, - "category": "Blue", - "destination": "f3b33749-c799-47f2-b242-160be2001550", - "uuid": "9e496bfe-227c-484e-9a3d-2ba607383c52" - }, - { - "test": { - "test": "green", - "type": "contains_any" - }, - "category": "Green", - "destination": "f3b33749-c799-47f2-b242-160be2001550", - "uuid": "075bb3b0-b104-4acf-8ee7-33046b207343" - }, - { - "test": { - "test": "maroon", - "type": "contains_any" - }, - "category": "Red", - "destination": "f3b33749-c799-47f2-b242-160be2001550", - "uuid": "2c9eb7fe-6084-47f3-9a70-745b11e76991" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "f3b33749-c799-47f2-b242-160be2001550", - "uuid": "7d217ae2-0a07-45db-95bb-32a887dc1f94" - } - ], - "label": "Color", - "operand": "@step.value", - "y": 146, - "x": 290 - }, - { - "uuid": "fd47736c-6b31-4330-8d49-7dfff3e391a1", - "response_type": "C", - "rules": [ - { - "test": { - "test": "Male", - "type": "contains_any" - }, - "category": "Male", - "destination": "5ad1b145-58ba-4b65-8cb5-84d98172b221", - "uuid": "e86168bc-64fc-4458-a970-884b11b96ffa" - }, - { - "test": { - "test": "Female", - "type": "contains_any" - }, - "category": "Female", - "destination": "5ad1b145-58ba-4b65-8cb5-84d98172b221", - "uuid": "5caa68fe-d64e-4ca6-a782-305930095c62" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "5ad1b145-58ba-4b65-8cb5-84d98172b221", - "uuid": "129ee4dc-2dff-4973-9d59-d9ccfc8748e1" - } - ], - "label": "Gender", - "operand": "@step.value", - "y": 414, - "x": 384 - }, - { - "uuid": "edec8286-536b-4924-8b94-0af69b41d4c2", - "response_type": "C", - "rules": [ - { - "test": { - "test": "18", - "type": "lt" - }, - "category": "Child", - "destination": "f25f5e8b-dc93-442b-a92d-8d01730e1d99", - "uuid": "c299e0cd-9c6a-4a3c-b7d5-9aa162d58062" - }, - { - "test": { - "test": "65", - "type": "lt" - }, - "category": "Adult", - "destination": "f25f5e8b-dc93-442b-a92d-8d01730e1d99", - "uuid": "139189b0-b4bf-4cfd-ab66-91ea3b40b406" - }, - { - "test": { - "type": "number" - }, - "category": "Senior", - "destination": "f25f5e8b-dc93-442b-a92d-8d01730e1d99", - "uuid": "23265c0d-4a6e-44a7-8d55-f549c801ccd9" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "f25f5e8b-dc93-442b-a92d-8d01730e1d99", - "uuid": "323b6e22-ae27-405e-b19d-b266f50b2db8" - } - ], - "label": "Age", - "operand": "@step.value", - "y": 695, - "x": 390 - } - ], - "action_sets": [ - { - "y": 0, - "x": 228, - "destination": "c1911a46-ba8e-48e2-be8f-7c0be30edcdd", - "uuid": "5dc9d8e1-90c6-4043-bf97-73d35138dc00", - 
"actions": [ - { - "msg": "What is your favorite color?", - "type": "reply" - } - ] - }, - { - "y": 298, - "x": 223, - "destination": "fd47736c-6b31-4330-8d49-7dfff3e391a1", - "uuid": "f3b33749-c799-47f2-b242-160be2001550", - "actions": [ - { - "msg": "What is your gender?", - "type": "reply" - } - ] - }, - { - "y": 557, - "x": 224, - "destination": "edec8286-536b-4924-8b94-0af69b41d4c2", - "uuid": "5ad1b145-58ba-4b65-8cb5-84d98172b221", - "actions": [ - { - "type": "save", - "field": "gender", - "label": "Gender", - "value": "@flow.gender" - }, - { - "msg": "What is your age?", - "type": "reply" - } - ] - }, - { - "y": 832, - "x": 217, - "destination": null, - "uuid": "f25f5e8b-dc93-442b-a92d-8d01730e1d99", - "actions": [ - { - "msg": "Thanks.", - "type": "reply" - } - ] - } - ], - "last_saved": "2014-06-26T14:18:09.205715Z", - "metadata": { - "notes": [] - } - }, - "flow_type": "F", - "name": "Color Age Gender", - "id": 23323 - } - ], - "triggers": [] -} diff --git a/media/test_flows/dual_webhook.json b/media/test_flows/dual_webhook.json deleted file mode 100644 index 0ed7a034442..00000000000 --- a/media/test_flows/dual_webhook.json +++ /dev/null @@ -1,132 +0,0 @@ -{ - "campaigns": [], - "version": 9, - "site": "https://textit.in", - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "0aabad42-3ec6-40c7-a4cc-c5190b8b4465", - "uuid": "ff642bb5-14fa-4bb6-8040-0ceec395a164", - "actions": [ - { - "msg": { - "eng": "This is the first message" - }, - "type": "reply" - } - ] - }, - { - "y": 310, - "x": 129, - "destination": "6304e1d5-3c0c-44ea-9519-39389227e3c0", - "uuid": "d7523614-1b39-481f-a451-4c4ac9201095", - "actions": [ - { - "msg": { - "eng": "Great, your code is @extra.code. Enter your name" - }, - "type": "reply" - } - ] - } - ], - "version": 9, - "flow_type": "F", - "entry": "ff642bb5-14fa-4bb6-8040-0ceec395a164", - "rule_sets": [ - { - "uuid": "0aabad42-3ec6-40c7-a4cc-c5190b8b4465", - "webhook_action": "POST", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "d7523614-1b39-481f-a451-4c4ac9201095", - "uuid": "1717d336-6fb3-4da0-ac51-4588792e46d2", - "destination_type": "A" - } - ], - "webhook": "http://localhost:49999/code", - "ruleset_type": "webhook", - "label": "Webhook", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 169, - "x": 286, - "config": {} - }, - { - "uuid": "6304e1d5-3c0c-44ea-9519-39389227e3c0", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "8ad78c14-7ebe-4968-82dc-b66dc27d4d96", - "uuid": "da800d48-b1c8-44cf-8e2c-b6c6d5c98aa3", - "destination_type": "R" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Name", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 457, - "x": 265, - "config": {} - }, - { - "uuid": "8ad78c14-7ebe-4968-82dc-b66dc27d4d96", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "uuid": "4dd0f3e7-cc15-41fa-8a84-d53d76d46d66" - } - ], - "webhook": "http://localhost:49999/success", - "ruleset_type": "webhook", - "label": "Webhook 2", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 617, - "x": 312, - "config": {} - } - ], - "metadata": { - "expires": 10080, - "revision": 16, - "uuid": 
"099d0d1e-3769-472f-9ea7-f3bd5a11c8ff", - "name": "Webhook Migration", - "saved_on": "2016-08-16T16:34:56.351428Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v10.json b/media/test_flows/favorites_bad_group_name_v10.json deleted file mode 100644 index 5b3795fb0b9..00000000000 --- a/media/test_flows/favorites_bad_group_name_v10.json +++ /dev/null @@ -1,432 +0,0 @@ -{ - "version":10, - "flows":[ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type": "add_group", - "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups": [ - { - "uuid": "0fdffdb4-3ca4-4d35-b6a7-129b0dfc7d39", - "name": "< 25" - } - ] - }, - { - "type": "del_group", - "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups": [ - { - "uuid": "e5e7bfaf-7c35-4590-8039-c33da2b98d8c", - "name": "> 100" - } - ] - } - ] - }, - { - "y": 437, - "x": 131, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions": [ - { - "msg": { - "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y": 8, - "x": 456, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions": [ - { - "msg": { - "base": "I don't know that color. Try again." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y": 835, - "x": 191, - "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions": [ - { - "msg": { - "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y": 465, - "x": 512, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions": [ - { - "msg": { - "base": "I don't know that one, try again please." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y": 1105, - "x": 191, - "destination": null, - "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions": [ - { - "msg": { - "base": "Thanks @flow.name, we are all done!" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "flow_type": "F", - "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", - "rule_sets": [ - { - "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", - "rules": [ - { - "test": { - "test": { - "base": "Red" - }, - "type": "contains_any" - }, - "category": { - "base": "Red" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "663f667d-561a-4920-9375-3ce367615bdc", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Green" - }, - "type": "contains_any" - }, - "category": { - "base": "Green" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Blue" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Navy" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Cyan" - }, - "type": "contains_any" - }, - "category": { - "base": "Cyan" - }, - "destination": null, - "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type": null - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": null, - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "expression", - "label": "Color", - "operand": "@extra.value", - "finished_key": null, - "y": 329, - "x": 98, - "config": {} - }, - { - "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", - "rules": [ - { - "test": { - "test": { - "base": "Mutzig" - }, - "type": "contains_any" - }, - "category": { - "base": "Mutzig" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Primus" - }, - "type": "contains_any" - }, - "category": { - "base": "Primus" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Turbo King" - }, - "type": "contains_any" - }, - "category": { - "base": "Turbo King" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Skol" - }, - "type": "contains_any" - }, - "category": { - "base": "Skol" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "expression", - "label": "Beer", - "y": 687, - "finished_key": null, - "operand": "@(LOWER(step.value))", - "x": 112, - "config": {} - }, - { - "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, 
- "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Name", - "y": 1002, - "finished_key": null, - "operand": "@step.value", - "x": 191, - "config": {} - }, - { - "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "wait_message", - "label": "Color Response", - "y": 129, - "finished_key": null, - "operand": "@step.value", - "x": 98, - "config": {} - }, - { - "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "rules": [ - { - "category": { - "base": "Success" - }, - "test": { - "status": "success", - "type": "webhook_status" - }, - "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - }, - { - "category": { - "base": "Failure" - }, - "test": { - "status": "failure", - "type": "webhook_status" - }, - "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid": "cb902a40-780f-4e9b-a31e-e7d1021d05ed", - "destination_type": null - } - ], - "ruleset_type": "webhook", - "label": "Color Webhook", - "y": 229, - "finished_key": null, - "operand": "@step.value", - "x": 98, - "config": { - "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "webhook_action": "POST" - } - }, - { - "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Beer Response", - "operand": "@step.value", - "finished_key": null, - "y": 587, - "x": 112, - "config": {} - } - ], - "metadata": { - "uuid": null, - "notes": [], - "expires": 720, - "name": "Favorites", - "saved_on": null, - "revision": 1 - } - } - ], - "triggers":[ - - ] -} \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v4.json b/media/test_flows/favorites_bad_group_name_v4.json deleted file mode 100644 index c545eed0e39..00000000000 --- a/media/test_flows/favorites_bad_group_name_v4.json +++ /dev/null @@ -1,342 +0,0 @@ -{ - "version": 4, - "flows": [ - { - "definition": { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type": "add_group", - "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups": [{"name": "< 25", "id": 15572}] - }, - { - "type": "del_group", - "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups": [{"name": "> 100", "id": 15573}] - } - ] - }, - { - "y": 237, - "x": 131, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions": [ - { - "msg": { - "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y": 8, - "x": 456, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions": [ - { - "msg": { - "base": "I don't know that color. Try again." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y": 535, - "x": 191, - "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions": [ - { - "msg": { - "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y": 265, - "x": 512, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions": [ - { - "msg": { - "base": "I don't know that one, try again please." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y": 805, - "x": 191, - "destination": null, - "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions": [ - { - "msg": { - "base": "Thanks @flow.name, we are all done!" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "rule_sets": [ - { - "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules": [ - { - "test": { - "test": { - "base": "Red" - }, - "type": "contains_any" - }, - "category": { - "base": "Red" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "663f667d-561a-4920-9375-3ce367615bdc", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Green" - }, - "type": "contains_any" - }, - "category": { - "base": "Green" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Blue" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Navy" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Cyan" - }, - "type": "contains_any" - }, - "category": { - "base": "Cyan" - }, - "destination": null, - "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type": null - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": null, - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": null, - "label": "Color", - "finished_key": null, - "response_type": "C", - "y": 129, - "x": 98, - "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "webhook_action": "POST", - "operand": "@extra.value", - "config": {} - }, - { - "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules": [ - { - "test": { - "test": { - "base": "Mutzig" - }, - "type": "contains_any" - }, - "category": { - "base": "Mutzig" - }, - "destination": 
"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Primus" - }, - "type": "contains_any" - }, - "category": { - "base": "Primus" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Turbo King" - }, - "type": "contains_any" - }, - "category": { - "base": "Turbo King" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Skol" - }, - "type": "contains_any" - }, - "category": { - "base": "Skol" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": null, - "label": "Beer", - "operand": "@step.value|lower_case", - "finished_key": null, - "response_type": "C", - "y": 387, - "x": 112, - "config": {} - }, - { - "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type": "A" - } - ], - "ruleset_type": null, - "label": "Name", - "operand": "@step.value", - "finished_key": null, - "response_type": "C", - "y": 702, - "x": 191, - "config": {} - } - ], - "metadata": { - "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", - "notes": [], - "expires": 720, - "name": "Favorites", - "revision": 1, - "saved_on": "2017-08-16T23:10:18.579169Z" - } - }, - "version": 4, - "flow_type": "F", - "name": "Favorites", - "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v5.json b/media/test_flows/favorites_bad_group_name_v5.json deleted file mode 100644 index 50a3d96cc81..00000000000 --- a/media/test_flows/favorites_bad_group_name_v5.json +++ /dev/null @@ -1,449 +0,0 @@ -{ - "version":5, - "flows":[ - { - "definition":{ - "base_language":"base", - "rule_sets":[ - { - "uuid":"c564c56f-0341-471e-8bb1-e303090fea6a", - "rules":[ - { - "test":{ - "test":{ - "base":"Red" - }, - "type":"contains_any" - }, - "category":{ - "base":"Red" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"663f667d-561a-4920-9375-3ce367615bdc", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Green" - }, - "type":"contains_any" - }, - "category":{ - "base":"Green" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Blue" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Navy" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type":"A" - 
}, - { - "test":{ - "test":{ - "base":"Cyan" - }, - "type":"contains_any" - }, - "category":{ - "base":"Cyan" - }, - "destination":null, - "uuid":"cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type":null - }, - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":null, - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"expression", - "label":"Color", - "operand":"@extra.value", - "finished_key":null, - "y":329, - "x":98, - "config":{ - - } - }, - { - "uuid":"8b941374-1b65-4154-afa3-27b871f7be6b", - "rules":[ - { - "test":{ - "test":{ - "base":"Mutzig" - }, - "type":"contains_any" - }, - "category":{ - "base":"Mutzig" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Primus" - }, - "type":"contains_any" - }, - "category":{ - "base":"Primus" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Turbo King" - }, - "type":"contains_any" - }, - "category":{ - "base":"Turbo King" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Skol" - }, - "type":"contains_any" - }, - "category":{ - "base":"Skol" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type":"A" - }, - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"expression", - "label":"Beer", - "y":687, - "finished_key":null, - "operand":"@step.value|lower_case", - "x":112, - "config":{ - - } - }, - { - "uuid":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid":"cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Name", - "y":1002, - "finished_key":null, - "operand":"@step.value", - "x":191, - "config":{ - - } - }, - { - "uuid":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"wait_message", - "label":"Color Response", - "y":129, - "finished_key":null, - "operand":"@step.value", - "x":98, - "config":{ - - } - }, - { - "uuid":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "webhook_action":"POST", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "webhook":"http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "ruleset_type":"webhook", - "label":"Color Webhook", - "y":229, - "finished_key":null, - "operand":"@step.value", - "x":98, - "config":{ - - } - }, - { - "uuid":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - 
"rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Beer Response", - "operand":"@step.value", - "finished_key":null, - "y":587, - "x":112, - "config":{ - - } - } - ], - "action_sets":[ - { - "y":0, - "x":100, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"a6676605-332a-4309-a8b8-79b33e73adcd", - "actions":[ - { - "msg":{ - "base":"What is your favorite color?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type":"add_group", - "uuid":"5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups":[ - { - "name":"< 25", - "id":15572 - } - ] - }, - { - "type":"del_group", - "uuid":"2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups":[ - { - "name":"> 100", - "id":15573 - } - ] - } - ] - }, - { - "y":437, - "x":131, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions":[ - { - "msg":{ - "base":"Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y":8, - "x":456, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions":[ - { - "msg":{ - "base":"I don't know that color. Try again." - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y":835, - "x":191, - "destination":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions":[ - { - "msg":{ - "base":"Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y":465, - "x":512, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions":[ - { - "msg":{ - "base":"I don't know that one, try again please." - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y":1105, - "x":191, - "destination":null, - "uuid":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions":[ - { - "msg":{ - "base":"Thanks @flow.name, we are all done!" 
- }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "metadata":{ - "uuid":"77ae372d-a937-4d9b-a703-cc1c75c4c6f1", - "notes":[ - - ], - "expires":720, - "name":"Favorites", - "saved_on":"2017-08-16T23:10:18.579169Z", - "revision":1 - } - }, - "version":5, - "flow_type":"F", - "name":"Favorites", - "entry":"a6676605-332a-4309-a8b8-79b33e73adcd" - } - ], - "triggers":[ - - ] - } \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v6.json b/media/test_flows/favorites_bad_group_name_v6.json deleted file mode 100644 index 8ac98bf704f..00000000000 --- a/media/test_flows/favorites_bad_group_name_v6.json +++ /dev/null @@ -1,450 +0,0 @@ -{ - "version":6, - "flows":[ - { - "definition":{ - "base_language":"base", - "rule_sets":[ - { - "uuid":"c564c56f-0341-471e-8bb1-e303090fea6a", - "rules":[ - { - "test":{ - "test":{ - "base":"Red" - }, - "type":"contains_any" - }, - "category":{ - "base":"Red" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"663f667d-561a-4920-9375-3ce367615bdc", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Green" - }, - "type":"contains_any" - }, - "category":{ - "base":"Green" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Blue" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Navy" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Cyan" - }, - "type":"contains_any" - }, - "category":{ - "base":"Cyan" - }, - "destination":null, - "uuid":"cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type":null - }, - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":null, - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"expression", - "label":"Color", - "y":329, - "finished_key":null, - "operand":"@extra.value", - "x":98, - "config":{ - - } - }, - { - "uuid":"8b941374-1b65-4154-afa3-27b871f7be6b", - "rules":[ - { - "test":{ - "test":{ - "base":"Mutzig" - }, - "type":"contains_any" - }, - "category":{ - "base":"Mutzig" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Primus" - }, - "type":"contains_any" - }, - "category":{ - "base":"Primus" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Turbo King" - }, - "type":"contains_any" - }, - "category":{ - "base":"Turbo King" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Skol" - }, - "type":"contains_any" - }, - "category":{ - "base":"Skol" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type":"A" - }, - { - "test":{ - "test":"true", - 
"type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"expression", - "label":"Beer", - "operand":"@step.value|lower_case", - "finished_key":null, - "y":687, - "x":112, - "config":{ - - } - }, - { - "uuid":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid":"cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Name", - "operand":"@step.value", - "finished_key":null, - "y":1002, - "x":191, - "config":{ - - } - }, - { - "uuid":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"wait_message", - "label":"Color Response", - "operand":"@step.value", - "finished_key":null, - "y":129, - "x":98, - "config":{ - - } - }, - { - "uuid":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "webhook_action":"POST", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "webhook":"http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "ruleset_type":"webhook", - "label":"Color Webhook", - "operand":"@step.value", - "finished_key":null, - "y":229, - "x":98, - "config":{ - - } - }, - { - "uuid":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Beer Response", - "y":587, - "finished_key":null, - "operand":"@step.value", - "x":112, - "config":{ - - } - } - ], - "action_sets":[ - { - "y":0, - "x":100, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"a6676605-332a-4309-a8b8-79b33e73adcd", - "actions":[ - { - "msg":{ - "base":"What is your favorite color?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type":"add_group", - "uuid":"5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups":[ - { - "name":"< 25", - "id":15572 - } - ] - }, - { - "type":"del_group", - "uuid":"2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups":[ - { - "name":"> 100", - "id":15573 - } - ] - } - ] - }, - { - "y":437, - "x":131, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions":[ - { - "msg":{ - "base":"Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y":8, - "x":456, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions":[ - { - "msg":{ - "base":"I don't know that color. Try again." 
- }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y":835, - "x":191, - "destination":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions":[ - { - "msg":{ - "base":"Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y":465, - "x":512, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions":[ - { - "msg":{ - "base":"I don't know that one, try again please." - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y":1105, - "x":191, - "destination":null, - "uuid":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions":[ - { - "msg":{ - "base":"Thanks @flow.name, we are all done!" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "entry":"a6676605-332a-4309-a8b8-79b33e73adcd", - "metadata":{ - "uuid":"77ae372d-a937-4d9b-a703-cc1c75c4c6f1", - "notes":[ - - ], - "expires":720, - "name":"Favorites", - "saved_on":"2017-08-16T23:10:18.579169Z", - "revision":1 - } - }, - "version":6, - "flow_type":"F", - "name":"Favorites", - "entry":"a6676605-332a-4309-a8b8-79b33e73adcd" - } - ], - "triggers":[ - - ] - } \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v7.json b/media/test_flows/favorites_bad_group_name_v7.json deleted file mode 100644 index d573c0524cb..00000000000 --- a/media/test_flows/favorites_bad_group_name_v7.json +++ /dev/null @@ -1,446 +0,0 @@ -{ - "version":7, - "flows":[ - { - "base_language":"base", - "action_sets":[ - { - "y":0, - "x":100, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"a6676605-332a-4309-a8b8-79b33e73adcd", - "actions":[ - { - "msg":{ - "base":"What is your favorite color?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type":"add_group", - "uuid":"5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups":[ - { - "name":"< 25", - "id":15572 - } - ] - }, - { - "type":"del_group", - "uuid":"2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups":[ - { - "name":"> 100", - "id":15573 - } - ] - } - ] - }, - { - "y":437, - "x":131, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions":[ - { - "msg":{ - "base":"Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y":8, - "x":456, - "destination":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid":"37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions":[ - { - "msg":{ - "base":"I don't know that color. Try again." - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y":835, - "x":191, - "destination":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions":[ - { - "msg":{ - "base":"Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" 
- }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y":465, - "x":512, - "destination":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions":[ - { - "msg":{ - "base":"I don't know that one, try again please." - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y":1105, - "x":191, - "destination":null, - "uuid":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions":[ - { - "msg":{ - "base":"Thanks @flow.name, we are all done!" - }, - "media":{ - - }, - "send_all":false, - "type":"reply", - "uuid":"512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "flow_type":"F", - "entry":"a6676605-332a-4309-a8b8-79b33e73adcd", - "rule_sets":[ - { - "uuid":"c564c56f-0341-471e-8bb1-e303090fea6a", - "rules":[ - { - "test":{ - "test":{ - "base":"Red" - }, - "type":"contains_any" - }, - "category":{ - "base":"Red" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"663f667d-561a-4920-9375-3ce367615bdc", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Green" - }, - "type":"contains_any" - }, - "category":{ - "base":"Green" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Blue" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Navy" - }, - "type":"contains_any" - }, - "category":{ - "base":"Blue" - }, - "destination":"00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid":"08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Cyan" - }, - "type":"contains_any" - }, - "category":{ - "base":"Cyan" - }, - "destination":null, - "uuid":"cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type":null - }, - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":null, - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"expression", - "label":"Color", - "operand":"@extra.value", - "finished_key":null, - "y":329, - "x":98, - "config":{ - - } - }, - { - "uuid":"8b941374-1b65-4154-afa3-27b871f7be6b", - "rules":[ - { - "test":{ - "test":{ - "base":"Mutzig" - }, - "type":"contains_any" - }, - "category":{ - "base":"Mutzig" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Primus" - }, - "type":"contains_any" - }, - "category":{ - "base":"Primus" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Turbo King" - }, - "type":"contains_any" - }, - "category":{ - "base":"Turbo King" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type":"A" - }, - { - "test":{ - "test":{ - "base":"Skol" - }, - "type":"contains_any" - }, - "category":{ - "base":"Skol" - }, - "destination":"92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid":"c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type":"A" - }, - { - "test":{ - 
"test":"true", - "type":"true" - }, - "category":{ - "base":"Other" - }, - "destination":"7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"expression", - "label":"Beer", - "y":687, - "finished_key":null, - "operand":"@step.value|lower_case", - "x":112, - "config":{ - - } - }, - { - "uuid":"c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid":"cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Name", - "y":1002, - "finished_key":null, - "operand":"@step.value", - "x":191, - "config":{ - - } - }, - { - "uuid":"0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "ruleset_type":"wait_message", - "label":"Color Response", - "y":129, - "finished_key":null, - "operand":"@step.value", - "x":98, - "config":{ - - } - }, - { - "uuid":"c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "webhook_action":"POST", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid":"955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type":null - } - ], - "webhook":"http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "ruleset_type":"webhook", - "label":"Color Webhook", - "y":229, - "finished_key":null, - "operand":"@step.value", - "x":98, - "config":{ - - } - }, - { - "uuid":"58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules":[ - { - "test":{ - "test":"true", - "type":"true" - }, - "category":{ - "base":"All Responses" - }, - "destination":"8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid":"fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type":"A" - } - ], - "ruleset_type":"wait_message", - "label":"Beer Response", - "operand":"@step.value", - "finished_key":null, - "y":587, - "x":112, - "config":{ - - } - } - ], - "metadata":{ - "uuid":null, - "notes":[ - - ], - "expires":720, - "name":"Favorites", - "saved_on":null, - "id":null, - "revision":1 - } - } - ], - "triggers":[ - - ] -} \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v8.json b/media/test_flows/favorites_bad_group_name_v8.json deleted file mode 100644 index 54be9f6846a..00000000000 --- a/media/test_flows/favorites_bad_group_name_v8.json +++ /dev/null @@ -1,420 +0,0 @@ -{ - "version":8, - "flows":[ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type": "add_group", - "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups": [ - { - "name": "< 25", - "id": 15572 - } - ] - }, - { - "type": "del_group", - "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups": [ - { - "name": "> 100", - "id": 15573 - } - ] - } - ] - }, - { - "y": 437, - "x": 131, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions": [ - { - "msg": { - "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y": 8, - "x": 456, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions": [ - { - "msg": { - "base": "I don't know that color. Try again." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y": 835, - "x": 191, - "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions": [ - { - "msg": { - "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y": 465, - "x": 512, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions": [ - { - "msg": { - "base": "I don't know that one, try again please." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y": 1105, - "x": 191, - "destination": null, - "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions": [ - { - "msg": { - "base": "Thanks @flow.name, we are all done!" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "flow_type": "F", - "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", - "rule_sets": [ - { - "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", - "rules": [ - { - "test": { - "test": { - "base": "Red" - }, - "type": "contains_any" - }, - "category": { - "base": "Red" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "663f667d-561a-4920-9375-3ce367615bdc", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Green" - }, - "type": "contains_any" - }, - "category": { - "base": "Green" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Blue" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Navy" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Cyan" - }, - "type": "contains_any" - }, - "category": { - "base": "Cyan" - }, - "destination": null, - "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type": null - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": null, - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "expression", - "label": "Color", - "y": 329, - "finished_key": null, - "operand": "@extra.value", - "x": 98, - "config": {} - }, - { - "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", - "rules": [ - { - "test": { - "test": { - "base": "Mutzig" - }, - "type": "contains_any" - }, - "category": { - "base": "Mutzig" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Primus" - }, - "type": "contains_any" - }, - "category": { - "base": "Primus" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Turbo King" - }, - "type": "contains_any" - }, - "category": { - "base": "Turbo King" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Skol" - }, - "type": "contains_any" - }, - "category": { - "base": "Skol" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "expression", - "label": "Beer", - "operand": "@(LOWER(step.value))", - "finished_key": null, - "y": 687, - "x": 112, - "config": {} - }, - { - "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, 
- "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Name", - "operand": "@step.value", - "finished_key": null, - "y": 1002, - "x": 191, - "config": {} - }, - { - "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "wait_message", - "label": "Color Response", - "operand": "@step.value", - "finished_key": null, - "y": 129, - "x": 98, - "config": {} - }, - { - "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "webhook_action": "POST", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "ruleset_type": "webhook", - "label": "Color Webhook", - "operand": "@step.value", - "finished_key": null, - "y": 229, - "x": 98, - "config": {} - }, - { - "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Beer Response", - "y": 587, - "finished_key": null, - "operand": "@step.value", - "x": 112, - "config": {} - } - ], - "metadata": { - "uuid": null, - "notes": [], - "expires": 720, - "name": "Favorites", - "revision": 1, - "id": null, - "saved_on": null - } - } - ], - "triggers":[ - - ] -} \ No newline at end of file diff --git a/media/test_flows/favorites_bad_group_name_v9.json b/media/test_flows/favorites_bad_group_name_v9.json deleted file mode 100644 index 1ff3e4354f8..00000000000 --- a/media/test_flows/favorites_bad_group_name_v9.json +++ /dev/null @@ -1,419 +0,0 @@ -{ - "version":9, - "flows":[ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" - }, - { - "type": "add_group", - "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "groups": [ - { - "uuid": "0fdffdb4-3ca4-4d35-b6a7-129b0dfc7d39", - "name": "< 25" - } - ] - }, - { - "type": "del_group", - "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", - "groups": [ - { - "uuid": "e5e7bfaf-7c35-4590-8039-c33da2b98d8c", - "name": "> 100" - } - ] - } - ] - }, - { - "y": 437, - "x": 131, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "actions": [ - { - "msg": { - "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" - } - ] - }, - { - "y": 8, - "x": 456, - "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", - "actions": [ - { - "msg": { - "base": "I don't know that color. Try again." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" - } - ] - }, - { - "y": 835, - "x": 191, - "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "actions": [ - { - "msg": { - "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" - } - ] - }, - { - "y": 465, - "x": 512, - "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "actions": [ - { - "msg": { - "base": "I don't know that one, try again please." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" - } - ] - }, - { - "y": 1105, - "x": 191, - "destination": null, - "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "actions": [ - { - "msg": { - "base": "Thanks @flow.name, we are all done!" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" - } - ] - } - ], - "flow_type": "F", - "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", - "rule_sets": [ - { - "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", - "rules": [ - { - "test": { - "test": { - "base": "Red" - }, - "type": "contains_any" - }, - "category": { - "base": "Red" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "663f667d-561a-4920-9375-3ce367615bdc", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Green" - }, - "type": "contains_any" - }, - "category": { - "base": "Green" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Blue" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Navy" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", - "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Cyan" - }, - "type": "contains_any" - }, - "category": { - "base": "Cyan" - }, - "destination": null, - "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", - "destination_type": null - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": null, - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "expression", - "label": "Color", - "y": 329, - "finished_key": null, - "operand": "@extra.value", - "x": 98, - "config": {} - }, - { - "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", - "rules": [ - { - "test": { - "test": { - "base": "Mutzig" - }, - "type": "contains_any" - }, - "category": { - "base": "Mutzig" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": 
"d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Primus" - }, - "type": "contains_any" - }, - "category": { - "base": "Primus" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Turbo King" - }, - "type": "contains_any" - }, - "category": { - "base": "Turbo King" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Skol" - }, - "type": "contains_any" - }, - "category": { - "base": "Skol" - }, - "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", - "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "expression", - "label": "Beer", - "operand": "@(LOWER(step.value))", - "finished_key": null, - "y": 687, - "x": 112, - "config": {} - }, - { - "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", - "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Name", - "operand": "@step.value", - "finished_key": null, - "y": 1002, - "x": 191, - "config": {} - }, - { - "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "ruleset_type": "wait_message", - "label": "Color Response", - "operand": "@step.value", - "finished_key": null, - "y": 129, - "x": 98, - "config": {} - }, - { - "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", - "webhook_action": "POST", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", - "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", - "destination_type": null - } - ], - "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", - "ruleset_type": "webhook", - "label": "Color Webhook", - "operand": "@step.value", - "finished_key": null, - "y": 229, - "x": 98, - "config": {} - }, - { - "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", - "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", - "destination_type": "A" - } - ], - "ruleset_type": "wait_message", - "label": "Beer Response", - "y": 587, - "finished_key": null, - "operand": "@step.value", - "x": 112, - "config": {} - } - ], - "metadata": { - "uuid": null, - "notes": [], - "expires": 720, - "name": "Favorites", - "revision": 1, - "saved_on": null - } - } - ], - "triggers":[ - - ] -} \ No newline at end of file diff --git a/media/test_flows/favorites_timeout.json b/media/test_flows/favorites_timeout.json deleted file mode 100644 
index 276578cf45f..00000000000 --- a/media/test_flows/favorites_timeout.json +++ /dev/null @@ -1,341 +0,0 @@ -{ - "version": 7, - "flows": [ - { - "version": 7, - "flow_type": "M", - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", - "uuid": "127f3736-77ce-4006-9ab0-0c07cea88956", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" - }, - "type": "reply" - } - ] - }, - { - "y": 237, - "x": 131, - "destination": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", - "uuid": "44471ade-7979-4c94-8028-6cfb68836337", - "actions": [ - { - "msg": { - "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" - }, - "type": "reply" - } - ] - }, - { - "y": 8, - "x": 456, - "destination": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", - "uuid": "f9adf38f-ab18-49d3-a8ac-db2fe8f1e77f", - "actions": [ - { - "msg": { - "base": "I don't know that color. Try again." - }, - "type": "reply" - } - ] - }, - { - "y": 535, - "x": 191, - "destination": "ba95c5cd-e428-4a15-8b4b-23dd43943f2c", - "uuid": "89c5624e-3320-4668-a066-308865133080", - "actions": [ - { - "msg": { - "base": "Mmmmm... delicious @flow.beer.category. If only they made @flow.color|lower_case @flow.beer.category! Lastly, what is your name?" - }, - "type": "reply" - } - ] - }, - { - "y": 265, - "x": 512, - "destination": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", - "uuid": "a269683d-8229-4870-8585-be8320b9d8ca", - "actions": [ - { - "msg": { - "base": "I don't know that one, try again please." - }, - "type": "reply" - } - ] - }, - { - "y": 805, - "x": 191, - "destination": null, - "uuid": "10e483a8-5ffb-4c4f-917b-d43ce86c1d65", - "actions": [ - { - "msg": { - "base": "Thanks @flow.name, we are all done!" - }, - "type": "reply" - } - ] - }, - { - "uuid": "ba96d1c6-c721-470a-a04c-74015b1fdd35", - "x": 752, - "y": 1278, - "destination": null, - "actions": [ - { - "type": "reply", - "msg": { - "base": "Sorry you can't participate right now, I'll try again later." 
- } - } - ] - } - ], - "last_saved": "2015-09-15T02:37:08.805578Z", - "entry": "127f3736-77ce-4006-9ab0-0c07cea88956", - "rule_sets": [ - { - "uuid": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "base": "Red" - }, - "type": "contains_any" - }, - "category": { - "base": "Red" - }, - "destination": "44471ade-7979-4c94-8028-6cfb68836337", - "uuid": "8cd25a3f-0be2-494b-8b4c-3a4f0de7f9b2", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Green" - }, - "type": "contains_any" - }, - "category": { - "base": "Green" - }, - "destination": "44471ade-7979-4c94-8028-6cfb68836337", - "uuid": "db2863cf-7fda-4489-9345-d44dacf4e750", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Blue" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "44471ade-7979-4c94-8028-6cfb68836337", - "uuid": "2f462678-b176-49c1-bb5c-6e152502b0db", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Navy" - }, - "type": "contains_any" - }, - "category": { - "base": "Blue" - }, - "destination": "44471ade-7979-4c94-8028-6cfb68836337", - "uuid": "ecaeb59a-d7f1-4c21-a207-b2a29cc2488f", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Cyan" - }, - "type": "contains_any" - }, - "category": { - "base": "Cyan" - }, - "destination": null, - "uuid": "6f463a78-b176-49c1-bb5c-6e152502b0db", - "destination_type": null - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "f9adf38f-ab18-49d3-a8ac-db2fe8f1e77f", - "uuid": "df4455c2-806b-4af4-8ea9-f40278ec10e4", - "destination_type": "A" - }, - { - "uuid": "1023e76b-bd81-4720-a95e-a54a8fc3c328", - "category": { - "base": "No Response" - }, - "destination": "ba96d1c6-c721-470a-a04c-74015b1fdd35", - "destination_type": "A", - "test": { - "type": "timeout", - "minutes": 5 - } - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Color", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 129, - "x": 98, - "config": {} - }, - { - "uuid": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "base": "Mutzig" - }, - "type": "contains_any" - }, - "category": { - "base": "Mutzig" - }, - "destination": "89c5624e-3320-4668-a066-308865133080", - "uuid": "ea304225-332e-49d4-9768-1e804cd0b6c2", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Primus" - }, - "type": "contains_any" - }, - "category": { - "base": "Primus" - }, - "destination": "89c5624e-3320-4668-a066-308865133080", - "uuid": "57f8688e-c263-43d7-bd06-bdb98f0c58a8", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Turbo King" - }, - "type": "contains_any" - }, - "category": { - "base": "Turbo King" - }, - "destination": "89c5624e-3320-4668-a066-308865133080", - "uuid": "670f0205-bb39-4e12-ae95-5e29251b8a3e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Skol" - }, - "type": "contains_any" - }, - "category": { - "base": "Skol" - }, - "destination": "89c5624e-3320-4668-a066-308865133080", - "uuid": "2ff4713f-c62f-445c-880c-de8f6532d090", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "a269683d-8229-4870-8585-be8320b9d8ca", - "uuid": "1fc4c133-d038-4f75-a69e-6e7e3190e5d8", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": 
"wait_message", - "label": "Beer", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 387, - "x": 112, - "config": {} - }, - { - "uuid": "ba95c5cd-e428-4a15-8b4b-23dd43943f2c", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "destination": "10e483a8-5ffb-4c4f-917b-d43ce86c1d65", - "uuid": "c072ecb5-0686-40ea-8ed3-898dc1349783", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Name", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 702, - "x": 191, - "config": {} - } - ], - "metadata": { - "notes": [], - "name": "Favorites", - "id": 35559, - "expires": 720, - "revision": 1 - } - } - ], - "triggers": [] -} diff --git a/media/test_flows/flow_starts.json b/media/test_flows/flow_starts.json deleted file mode 100644 index 702c333367a..00000000000 --- a/media/test_flows/flow_starts.json +++ /dev/null @@ -1,141 +0,0 @@ -{ - "campaigns": [], - "version": 4, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "f4eba3d0-3bfc-4564-a3ee-662cb0fac950", - "uuid": "f1eaf53b-27d8-4e93-818e-4c4808b21976", - "actions": [] - } - ], - "last_saved": "2015-03-31T13:24:32.741812Z", - "entry": "f1eaf53b-27d8-4e93-818e-4c4808b21976", - "rule_sets": [ - { - "uuid": "f4eba3d0-3bfc-4564-a3ee-662cb0fac950", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses", - "eng": "Other" - }, - "destination": null, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "e0d2f9fb-a300-4a21-b258-e594bd260310" - } - ], - "webhook": null, - "label": "Message Response", - "operand": "@step.value", - "finished_key": null, - "response_type": "C", - "y": 0, - "x": 200 - } - ], - "metadata": {} - }, - "id": 13968, - "flow_type": "F", - "name": "Child Flow" - }, - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 163, - "x": 201, - "destination": null, - "uuid": "59fa8d6c-50fd-4686-a45f-d9a30f616b80", - "actions": [ - { - "type": "flow", - "name": "Child Flow", - "id": 13968 - } - ] - } - ], - "last_saved": "2015-03-31T13:27:02.568148Z", - "entry": "f4eba3d0-3bfc-4564-a3ee-662cb0fac9f0", - "rule_sets": [ - { - "uuid": "f4eba3d0-3bfc-4564-a3ee-662cb0fac9f0", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": { - "eng": "Foo" - }, - "base": "Foo", - "type": "contains_any" - }, - "category": { - "base": "Foo", - "eng": "Foo" - }, - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "0265fb76-e6df-458f-8727-0b2d08f040ec" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses", - "eng": "Other" - }, - "destination": "59fa8d6c-50fd-4686-a45f-d9a30f616b80", - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "e0d2f9fb-a300-4a21-b258-e594bd26030d" - } - ], - "webhook": null, - "label": "Response 1", - "operand": "@contact.groups", - "finished_key": null, - "response_type": "C", - "y": 0, - "x": 129 - } - ], - "metadata": {} - }, - "id": 600, - "flow_type": "F", - "name": "Parent Flow" - } - ], - "triggers": [] -} diff --git 
a/media/test_flows/group_split.json b/media/test_flows/group_split.json deleted file mode 100644 index 02be28c5123..00000000000 --- a/media/test_flows/group_split.json +++ /dev/null @@ -1,296 +0,0 @@ -{ - "campaigns": [], - "version": 10, - "site": "https://textit.in", - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 83, - "destination": "fa26edf9-e78f-4131-b5df-3bd84a1390f5", - "uuid": "1bf3f286-be43-45c3-8146-020a8f224591", - "actions": [ - { - "msg": { - "eng": "Group management! Toggle group membership with:\n(Add|Remove) \n\n@contact.groups" - }, - "type": "reply" - } - ] - }, - { - "y": 376, - "x": 545, - "destination": "403ded9b-edcc-4392-adf1-3de81088cdc1", - "uuid": "21b3cd69-e67c-402a-b977-566f94e8e7ec", - "actions": [ - { - "type": "add_group", - "groups": [ - "@flow.group_name" - ] - } - ] - }, - { - "y": 375, - "x": 786, - "destination": "403ded9b-edcc-4392-adf1-3de81088cdc1", - "uuid": "60d74019-f9c2-4665-bcab-0a4ba659b6af", - "actions": [ - { - "type": "del_group", - "groups": [ - "@flow.group_name" - ] - } - ] - }, - { - "y": 24, - "x": 733, - "destination": "1bf3f286-be43-45c3-8146-020a8f224591", - "uuid": "514fc3f0-fd04-4c27-92fb-bc348121c8ef", - "actions": [ - { - "msg": { - "eng": "Sorry, don't get that command." - }, - "type": "reply" - } - ] - }, - { - "y": 493, - "x": 177, - "destination": "403ded9b-edcc-4392-adf1-3de81088cdc1", - "uuid": "a9cd6587-61a7-4ab6-9a20-edf1f9cff033", - "actions": [ - { - "msg": { - "eng": "You are in @flow.member.category" - }, - "type": "reply" - } - ] - }, - { - "y": 466, - "x": 397, - "destination": "403ded9b-edcc-4392-adf1-3de81088cdc1", - "uuid": "f0c02e2b-6f24-48a7-9c8b-d49165387014", - "actions": [ - { - "msg": { - "eng": "You aren't in either group." - }, - "type": "reply" - } - ] - }, - { - "y": 587, - "x": 545, - "destination": "fa26edf9-e78f-4131-b5df-3bd84a1390f5", - "uuid": "403ded9b-edcc-4392-adf1-3de81088cdc1", - "actions": [ - { - "msg": { - "eng": "Awaiting command." 
- }, - "type": "reply" - } - ] - } - ], - "version": 10, - "flow_type": "F", - "entry": "1bf3f286-be43-45c3-8146-020a8f224591", - "rule_sets": [ - { - "uuid": "fa26edf9-e78f-4131-b5df-3bd84a1390f5", - "rules": [ - { - "test": { - "test": { - "eng": "split" - }, - "type": "starts" - }, - "category": { - "eng": "Split" - }, - "destination": "ec361b3c-7979-4cfb-b7ca-997d985aba40", - "uuid": "ef6d80e8-775d-4872-8dfc-4f61ae09c814", - "destination_type": "R" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other" - }, - "destination": "e7feaa40-b815-4619-b7b2-4a28c8fd4d10", - "uuid": "bd9ee747-fb19-4752-8dd5-d4a3f666b278", - "destination_type": "R" - } - ], - "ruleset_type": "wait_message", - "label": "Response", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 139, - "x": 351, - "config": {} - }, - { - "uuid": "2e616cfc-217d-4782-9522-c2bb0ee38ff8", - "rules": [ - { - "test": { - "test": { - "eng": "add" - }, - "type": "starts" - }, - "category": { - "eng": "Add" - }, - "destination": "21b3cd69-e67c-402a-b977-566f94e8e7ec", - "uuid": "51280d07-a741-4d2a-8b7c-199739e7f17e", - "destination_type": "A" - }, - { - "test": { - "test": { - "eng": "remove" - }, - "type": "starts" - }, - "category": { - "eng": "Remove" - }, - "destination": "60d74019-f9c2-4665-bcab-0a4ba659b6af", - "uuid": "90d49357-be6f-4a26-a92c-35571b949bf0", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other" - }, - "destination": "514fc3f0-fd04-4c27-92fb-bc348121c8ef", - "uuid": "1af92c00-e94f-4a49-97d2-1486083f2342", - "destination_type": "A" - } - ], - "ruleset_type": "expression", - "label": "Response 3", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 257, - "x": 633, - "config": {} - }, - { - "uuid": "e7feaa40-b815-4619-b7b2-4a28c8fd4d10", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "2e616cfc-217d-4782-9522-c2bb0ee38ff8", - "uuid": "ba782061-da19-4f97-9a64-d8b22c78641d", - "destination_type": "R" - } - ], - "ruleset_type": "expression", - "label": "Group Name", - "operand": "@(REMOVE_FIRST_WORD(step.value))", - "finished_key": null, - "response_type": "", - "y": 140, - "x": 671, - "config": {} - }, - { - "uuid": "ec361b3c-7979-4cfb-b7ca-997d985aba40", - "rules": [ - { - "test": { - "test": { - "name": "Group A", - "uuid": "ebccb83d-f407-4e66-86ff-b174c952b7d3" - }, - "type": "in_group" - }, - "category": { - "eng": "Group A" - }, - "destination": "a9cd6587-61a7-4ab6-9a20-edf1f9cff033", - "uuid": "e7da6d33-8b82-4d42-8b20-454a4460f0f6", - "destination_type": "A" - }, - { - "test": { - "test": { - "name": "Group B", - "uuid": "61d455f9-52e5-40c7-ae88-809644ffb028" - }, - "type": "in_group" - }, - "category": { - "eng": "Group B" - }, - "destination": "a9cd6587-61a7-4ab6-9a20-edf1f9cff033", - "uuid": "2b240091-2cc5-45e4-ad18-ade35d0bd320", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other" - }, - "destination": "f0c02e2b-6f24-48a7-9c8b-d49165387014", - "uuid": "66410d8d-a539-4e9a-b039-717da23bbdd2", - "destination_type": "A" - } - ], - "ruleset_type": "group", - "label": "Member", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 255, - "x": 196, - "config": {} - } - ], - "metadata": { - "uuid": "2d60a782-5805-488b-bf4d-b8154614c170", - "notes": [], - "expires": 10080, - 
"name": "Grouppo", - "saved_on": "2016-09-14T22:51:59.257419Z", - "revision": 304 - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/ivr_v3.json b/media/test_flows/ivr_v3.json deleted file mode 100644 index d4b3aec48fc..00000000000 --- a/media/test_flows/ivr_v3.json +++ /dev/null @@ -1,161 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "entry": "d1dd7b53-dafe-493f-a267-62301e76ee85", - "rule_sets": [ - { - "uuid": "c939d4cf-0294-4478-952b-a630ba972ba1", - "webhook_action": null, - "response_type": "C", - "rules": [ - { - "test": { - "test": "1", - "type": "eq" - }, - "category": "Yes", - "destination": "92367194-924b-4c47-9250-e47363855e32", - "uuid": "4cdf62ea-5cba-4261-992c-246c34667dc3" - }, - { - "test": { - "test": "2", - "type": "eq" - }, - "category": "No", - "destination": "866e80ae-128e-4e49-98b9-51317ec847e3", - "uuid": "a9b6086e-a423-4790-a342-df2c9972fc8c" - }, - { - "test": { - "test": "3", - "type": "eq" - }, - "category": "Maybe", - "destination": "096de08e-b260-4025-a2fd-f61996a3f4eb", - "uuid": "a4e661de-9ec1-424d-a383-362a456925e0" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "13549d38-f341-4ad5-ad44-1e4e5cedd032", - "uuid": "f8d4e9b0-846a-4508-a3d7-2f910fa04fc0" - } - ], - "webhook": null, - "label": "Call Me", - "operand": "@step.value", - "finished_key": null, - "y": 165, - "x": 204 - } - ], - "action_sets": [ - { - "y": 91, - "x": 655, - "destination": "c939d4cf-0294-4478-952b-a630ba972ba1", - "uuid": "13549d38-f341-4ad5-ad44-1e4e5cedd032", - "actions": [ - { - "recording": null, - "msg": "Press one, two, or three. Thanks.", - "type": "say", - "uuid": "6d8d0bd4-7b72-4a91-ad78-2ac3a5220637" - } - ] - }, - { - "y": 294, - "x": 531, - "destination": null, - "uuid": "096de08e-b260-4025-a2fd-f61996a3f4eb", - "actions": [ - { - "recording": null, - "msg": "This might be crazy.", - "type": "say", - "uuid": "80cd8158-6e2a-4adb-8ddc-f9b5b036a7ad" - } - ] - }, - { - "y": 294, - "x": 310, - "destination": null, - "uuid": "866e80ae-128e-4e49-98b9-51317ec847e3", - "actions": [ - { - "recording": null, - "msg": "Fine, this is the last time we shall speak.", - "type": "say", - "uuid": "14849fb6-3a7d-41c8-9595-9e97cf17f9dd" - } - ] - }, - { - "y": 291, - "x": 91, - "destination": null, - "uuid": "92367194-924b-4c47-9250-e47363855e32", - "actions": [ - { - "recording": null, - "msg": "Great, I can't wait to give you a call later.", - "type": "say", - "uuid": "cc6c5044-ec52-4861-ba66-b2ee741b668c" - } - ] - }, - { - "y": 0, - "x": 101, - "destination": "c939d4cf-0294-4478-952b-a630ba972ba1", - "uuid": "d1dd7b53-dafe-493f-a267-62301e76ee85", - "actions": [ - { - "recording": null, - "msg": "Would you like me to call you? 
Press one for yes, two for no, or three for maybe.", - "type": "say", - "uuid": "03290af7-4748-46e7-ac8d-1967375de33a" - } - ] - } - ], - "metadata": { - "notes": [] - } - }, - "id": 100, - "flow_type": "V", - "name": "Call me maybe" - } - ], - "triggers": [ - { - "flow": { - "name": "Call me maybe", - "id": 100 - }, - "groups": [], - "keyword": "callme", - "trigger_type": "K" - }, - { - "flow": { - "name": "Call me maybe", - "id": 100 - }, - "groups": [], - "keyword": null, - "trigger_type": "V" - } - ] -} diff --git a/media/test_flows/legacy/invalid/no_base_language_v8.json b/media/test_flows/legacy/invalid/no_base_language_v8.json new file mode 100644 index 00000000000..69c4cfcea53 --- /dev/null +++ b/media/test_flows/legacy/invalid/no_base_language_v8.json @@ -0,0 +1,44 @@ +{ + "base_language": null, + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": null, + "uuid": "f614e8c9-eeb6-4c94-bd07-b4bbe8a95b47", + "actions": [ + { + "type": "add_group", + "groups": [ + { + "name": "A New Group", + "id": 44899 + } + ] + }, + { + "field": "location", + "type": "save", + "value": "Seattle, WA", + "label": "Location" + }, + { + "lang": "eng", + "type": "lang", + "name": "English" + } + ] + } + ], + "version": 8, + "flow_type": "F", + "entry": "f614e8c9-eeb6-4c94-bd07-b4bbe8a95b47", + "rule_sets": [], + "metadata": { + "expires": 720, + "saved_on": "2015-11-19T00:30:09.477009Z", + "id": 42104, + "name": "Join New Group", + "revision": 6 + } +} \ No newline at end of file diff --git a/media/test_flows/legacy/invalid/non_localized_ruleset.json b/media/test_flows/legacy/invalid/non_localized_ruleset.json new file mode 100644 index 00000000000..746d7dda4cf --- /dev/null +++ b/media/test_flows/legacy/invalid/non_localized_ruleset.json @@ -0,0 +1,39 @@ +{ + "base_language": "eng", + "action_sets": [], + "version": 8, + "flow_type": "F", + "entry": "99696ed8-2555-4d18-ac0b-f9b9d85abf30", + "rule_sets": [ + { + "uuid": "99696ed8-2555-4d18-ac0b-f9b9d85abf30", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": "All Responses", + "uuid": "9b31bbfe-23d7-4838-806a-1a3989de3f37" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Response 1", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 0, + "x": 100, + "config": {} + } + ], + "metadata": { + "expires": 10080, + "revision": 1, + "id": 42135, + "name": "Empty", + "saved_on": "2015-11-19T22:31:15.972687Z" + } +} \ No newline at end of file diff --git a/media/test_flows/legacy/invalid/non_localized_with_language.json b/media/test_flows/legacy/invalid/non_localized_with_language.json new file mode 100644 index 00000000000..60169c3cba0 --- /dev/null +++ b/media/test_flows/legacy/invalid/non_localized_with_language.json @@ -0,0 +1,326 @@ +{ + "base_language": "eng", + "action_sets": [ + { + "y": 991, + "x": 389, + "destination": "7d1b7019-b611-4132-9ba4-af36cc167398", + "uuid": "49189b3e-8e2b-473f-bec2-10378f5a7c06", + "actions": [ + { + "msg": "Thanks @extra.name, we'll be in touch ASAP about order # @extra.order.", + "type": "reply" + }, + { + "msg": "Customer @extra.name has a problem with their order @extra.order for @extra.description. 
Please look into it ASAP and call them back with the status.\n \nCustomer Comment: \"@flow.comment\"\nCustomer Name: @extra.name\nCustomer Phone: @contact.tel ", + "type": "email", + "emails": [ + "name@domain.com" + ], + "subject": "Order Comment: @flow.lookup: @extra.order" + } + ] + }, + { + "y": 574, + "x": 612, + "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", + "uuid": "8618411e-a35e-472b-b867-3339aa46027a", + "actions": [ + { + "msg": "Uh oh @extra.name! Our record indicate that your order for @extra.description was cancelled on @extra.cancel_date. If you think this is in error, please reply with a comment and our orders department will get right on it!", + "type": "reply" + } + ] + }, + { + "y": 572, + "x": 389, + "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", + "uuid": "32bb903e-44c2-40f9-b65f-c8cda6490ee6", + "actions": [ + { + "msg": "Hi @extra.name. Hope you are patient because we haven't shipped your order for @extra.description yet. We expect to ship it by @extra.ship_date though. If you have any questions, just reply and our customer service department will be notified.", + "type": "reply" + } + ] + }, + { + "y": 572, + "x": 167, + "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", + "uuid": "bf36a209-4e21-44ac-835a-c3d5889aa2fb", + "actions": [ + { + "msg": "Great news @extra.name! We shipped your order for @extra.description on @extra.ship_date and we expect it will be delivered on @extra.delivery_date. If you have any questions, just reply and our customer service department will be notified.", + "type": "reply" + } + ] + }, + { + "y": 99, + "x": 787, + "destination": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", + "uuid": "7f4c29e3-f022-420d-8e2f-6165c572b991", + "actions": [ + { + "msg": "Sorry that doesn't look like a valid order number. Maybe try: CU001, CU002 or CU003?", + "type": "reply" + } + ] + }, + { + "y": 0, + "x": 409, + "destination": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", + "uuid": "4f79034a-51e0-4210-99cc-17f385de4de8", + "actions": [ + { + "msg": "Thanks for contacting the ThriftShop order status system. 
Please send your order # and we'll help you in a jiffy!", + "type": "reply" + } + ] + }, + { + "y": 854, + "x": 776, + "destination": "2cb5adcd-31b1-4d21-a0df-c5375cea1963", + "uuid": "6f550596-98a2-44fb-b769-b3c529f1b963", + "actions": [ + { + "msg": "@flow.lookup_response", + "type": "reply" + } + ] + }, + { + "y": 1430, + "x": 233, + "destination": "ad1d5767-8dfd-4c5d-b2e8-a997adb3a276", + "uuid": "81613e37-414c-4d73-884b-4ee7ae0fd913", + "actions": [ + { + "msg": "asdf", + "type": "reply" + } + ] + } + ], + "version": 8, + "flow_type": "F", + "entry": "4f79034a-51e0-4210-99cc-17f385de4de8", + "rule_sets": [ + { + "uuid": "2cb5adcd-31b1-4d21-a0df-c5375cea1963", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": "All Responses", + "destination": "49189b3e-8e2b-473f-bec2-10378f5a7c06", + "uuid": "088470d7-c4a9-4dd7-8be4-d10faf02fcea", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Comment", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 955, + "x": 762, + "config": {} + }, + { + "uuid": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", + "webhook_action": null, + "rules": [ + { + "category": "All Responses", + "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", + "destination": "b3bd5abb-3f70-4af5-85eb-d07900f9cb85", + "destination_type": "R", + "test": { + "test": "true", + "type": "true" + }, + "config": { + "type": "true", + "verbose_name": "contains anything", + "name": "Other", + "operands": 0 + } + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Lookup Responses", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 198, + "x": 356, + "config": {} + }, + { + "uuid": "7d1b7019-b611-4132-9ba4-af36cc167398", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": "All Responses", + "destination": "81613e37-414c-4d73-884b-4ee7ae0fd913", + "uuid": "124f3266-bc62-4743-b4b1-79fee0d45ad9", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Extra Comments", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 1252, + "x": 389, + "config": {} + }, + { + "uuid": "6baa1d6b-ee70-4d7c-85b3-22ed94281227", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "Shipped", + "type": "contains" + }, + "category": "Shipped", + "destination": "bf36a209-4e21-44ac-835a-c3d5889aa2fb", + "uuid": "bb336f83-3a5f-4a2e-ad42-757a0a79892b", + "destination_type": "A" + }, + { + "test": { + "test": "Pending", + "type": "contains" + }, + "category": "Pending", + "destination": "32bb903e-44c2-40f9-b65f-c8cda6490ee6", + "uuid": "91826255-5a81-418c-aadb-3378802a1134", + "destination_type": "A" + }, + { + "test": { + "test": "Cancelled", + "type": "contains" + }, + "category": "Cancelled", + "destination": "8618411e-a35e-472b-b867-3339aa46027a", + "uuid": "1efa73d0-e30c-4495-a5c8-724b48385839", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": "Other", + "destination": "7f4c29e3-f022-420d-8e2f-6165c572b991", + "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "expression", + "label": "Lookup", + "operand": "@extra.status", + "finished_key": null, + "response_type": "", + "y": 398, + "x": 356, + "config": {} + }, + { + "uuid": "b3bd5abb-3f70-4af5-85eb-d07900f9cb85", + 
"webhook_action": "POST", + "rules": [ + { + "category": "All Responses", + "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", + "destination": "6baa1d6b-ee70-4d7c-85b3-22ed94281227", + "destination_type": "R", + "test": { + "test": "true", + "type": "true" + }, + "config": { + "type": "true", + "verbose_name": "contains anything", + "name": "Other", + "operands": 0 + } + } + ], + "webhook": "https://api.textit.in/demo/status/", + "ruleset_type": "webhook", + "label": "Lookup Webhook", + "operand": "@extra.status", + "finished_key": null, + "response_type": "", + "y": 298, + "x": 356, + "config": {} + }, + { + "uuid": "ad1d5767-8dfd-4c5d-b2e8-a997adb3a276", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": "All Responses", + "config": { + "type": "true", + "verbose_name": "contains anything", + "name": "Other", + "operands": 0 + }, + "uuid": "439c839b-f04a-4394-9b8b-be91ca0991bd" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Boo", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 1580, + "x": 362, + "config": {} + } + ], + "metadata": { + "uuid": "2ed28d6a-61cd-436a-9159-01b024992e78", + "notes": [ + { + "body": "This flow demonstrates looking up an order using a webhook and giving the user different options based on the results. After looking up the order the user has the option to send additional comments which are forwarded to customer support representatives.\n\nUse order numbers CU001, CU002 or CU003 to see the different cases in action.", + "x": 59, + "y": 0, + "title": "Using Your Own Data" + } + ], + "expires": 720, + "name": "Sample Flow - Order Status Checker", + "saved_on": "2015-11-19T19:32:17.523441Z", + "id": 42133, + "revision": 1 + } +} \ No newline at end of file diff --git a/media/test_flows/legacy/invalid/not_fully_localized.json b/media/test_flows/legacy/invalid/not_fully_localized.json new file mode 100644 index 00000000000..bff2bfbe74e --- /dev/null +++ b/media/test_flows/legacy/invalid/not_fully_localized.json @@ -0,0 +1,31 @@ +{ + "version": 7, + "flow_type": "F", + "base_language": "eng", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": null, + "uuid": "127f3736-77ce-4006-9ab0-0c07cea88956", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "type": "reply" + } + ] + } + ], + "last_saved": "2015-09-15T02:37:08.805578Z", + "entry": "127f3736-77ce-4006-9ab0-0c07cea88956", + "rule_sets": [], + "metadata": { + "notes": [], + "name": "Not fully localized", + "id": 35559, + "expires": 720, + "revision": 1 + } +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/dual_webhook.json b/media/test_flows/legacy/migrations/dual_webhook.json new file mode 100644 index 00000000000..bd081f6ba4b --- /dev/null +++ b/media/test_flows/legacy/migrations/dual_webhook.json @@ -0,0 +1,132 @@ +{ + "campaigns": [], + "version": 9, + "site": "https://textit.in", + "flows": [ + { + "base_language": "eng", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0aabad42-3ec6-40c7-a4cc-c5190b8b4465", + "uuid": "ff642bb5-14fa-4bb6-8040-0ceec395a164", + "actions": [ + { + "msg": { + "eng": "This is the first message" + }, + "type": "reply" + } + ] + }, + { + "y": 310, + "x": 129, + "destination": "6304e1d5-3c0c-44ea-9519-39389227e3c0", + "uuid": "d7523614-1b39-481f-a451-4c4ac9201095", + "actions": [ + { + "msg": { + "eng": "Great, your code is @extra.code. 
Enter your name" + }, + "type": "reply" + } + ] + } + ], + "version": 9, + "flow_type": "F", + "entry": "ff642bb5-14fa-4bb6-8040-0ceec395a164", + "rule_sets": [ + { + "uuid": "0aabad42-3ec6-40c7-a4cc-c5190b8b4465", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "eng": "All Responses" + }, + "destination": "d7523614-1b39-481f-a451-4c4ac9201095", + "uuid": "1717d336-6fb3-4da0-ac51-4588792e46d2", + "destination_type": "A" + } + ], + "webhook": "http://localhost:49999/code", + "ruleset_type": "webhook", + "label": "Webhook", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 169, + "x": 286, + "config": {} + }, + { + "uuid": "6304e1d5-3c0c-44ea-9519-39389227e3c0", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "eng": "All Responses" + }, + "destination": "8ad78c14-7ebe-4968-82dc-b66dc27d4d96", + "uuid": "da800d48-b1c8-44cf-8e2c-b6c6d5c98aa3", + "destination_type": "R" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 457, + "x": 265, + "config": {} + }, + { + "uuid": "8ad78c14-7ebe-4968-82dc-b66dc27d4d96", + "webhook_action": "GET", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "eng": "All Responses" + }, + "uuid": "4dd0f3e7-cc15-41fa-8a84-d53d76d46d66" + } + ], + "webhook": "http://localhost:49999/success", + "ruleset_type": "webhook", + "label": "Webhook 2", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 617, + "x": 312, + "config": {} + } + ], + "metadata": { + "expires": 10080, + "revision": 16, + "uuid": "099d0d1e-3769-472f-9ea7-f3bd5a11c8ff", + "name": "Webhook Migration", + "saved_on": "2016-08-16T16:34:56.351428Z" + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites.json b/media/test_flows/legacy/migrations/favorites.json new file mode 100644 index 00000000000..8e328ddbd18 --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites.json @@ -0,0 +1,315 @@ +{ + "version": 7, + "flows": [ + { + "version": 7, + "flow_type": "M", + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", + "uuid": "127f3736-77ce-4006-9ab0-0c07cea88956", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "type": "reply" + } + ] + }, + { + "y": 237, + "x": 131, + "destination": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", + "uuid": "44471ade-7979-4c94-8028-6cfb68836337", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "type": "reply" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", + "uuid": "f9adf38f-ab18-49d3-a8ac-db2fe8f1e77f", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "type": "reply" + } + ] + }, + { + "y": 535, + "x": 191, + "destination": "ba95c5cd-e428-4a15-8b4b-23dd43943f2c", + "uuid": "89c5624e-3320-4668-a066-308865133080", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @flow.color|lower_case @flow.beer.category! Lastly, what is your name?" 
+ }, + "type": "reply" + } + ] + }, + { + "y": 265, + "x": 512, + "destination": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", + "uuid": "a269683d-8229-4870-8585-be8320b9d8ca", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "type": "reply" + } + ] + }, + { + "y": 805, + "x": 191, + "destination": null, + "uuid": "10e483a8-5ffb-4c4f-917b-d43ce86c1d65", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" + }, + "type": "reply" + } + ] + } + ], + "last_saved": "2015-09-15T02:37:08.805578Z", + "entry": "127f3736-77ce-4006-9ab0-0c07cea88956", + "rule_sets": [ + { + "uuid": "2bff5c33-9d29-4cfc-8bb7-0a1b9f97d830", + "webhook_action": null, + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "44471ade-7979-4c94-8028-6cfb68836337", + "uuid": "8cd25a3f-0be2-494b-8b4c-3a4f0de7f9b2", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "44471ade-7979-4c94-8028-6cfb68836337", + "uuid": "db2863cf-7fda-4489-9345-d44dacf4e750", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "44471ade-7979-4c94-8028-6cfb68836337", + "uuid": "2f462678-b176-49c1-bb5c-6e152502b0db", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "44471ade-7979-4c94-8028-6cfb68836337", + "uuid": "ecaeb59a-d7f1-4c21-a207-b2a29cc2488f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "6f463a78-b176-49c1-bb5c-6e152502b0db", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "f9adf38f-ab18-49d3-a8ac-db2fe8f1e77f", + "uuid": "df4455c2-806b-4af4-8ea9-f40278ec10e4", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Color", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 129, + "x": 98, + "config": {} + }, + { + "uuid": "a5fc5f8a-f562-4b03-a54f-51928f9df07e", + "webhook_action": null, + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "89c5624e-3320-4668-a066-308865133080", + "uuid": "ea304225-332e-49d4-9768-1e804cd0b6c2", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "89c5624e-3320-4668-a066-308865133080", + "uuid": "57f8688e-c263-43d7-bd06-bdb98f0c58a8", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "89c5624e-3320-4668-a066-308865133080", + "uuid": "670f0205-bb39-4e12-ae95-5e29251b8a3e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "89c5624e-3320-4668-a066-308865133080", + "uuid": "2ff4713f-c62f-445c-880c-de8f6532d090", + "destination_type": "A" + }, + { + "test": { + "test": 
"true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "a269683d-8229-4870-8585-be8320b9d8ca", + "uuid": "1fc4c133-d038-4f75-a69e-6e7e3190e5d8", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Beer", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 387, + "x": 112, + "config": {} + }, + { + "uuid": "ba95c5cd-e428-4a15-8b4b-23dd43943f2c", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "10e483a8-5ffb-4c4f-917b-d43ce86c1d65", + "uuid": "c072ecb5-0686-40ea-8ed3-898dc1349783", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "wait_message", + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 702, + "x": 191, + "config": {} + } + ], + "metadata": { + "notes": [], + "name": "Favorites", + "id": 35559, + "expires": 720, + "revision": 1 + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v10.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v10.json new file mode 100644 index 00000000000..3b28a37b65a --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v10.json @@ -0,0 +1,430 @@ +{ + "version": 10, + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "uuid": "0fdffdb4-3ca4-4d35-b6a7-129b0dfc7d39", + "name": "< 25" + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "uuid": "e5e7bfaf-7c35-4590-8039-c33da2b98d8c", + "name": "> 100" + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "flow_type": "F", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "operand": "@extra.value", + "finished_key": null, + "y": 329, + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": 
"expression", + "label": "Beer", + "y": 687, + "finished_key": null, + "operand": "@(LOWER(step.value))", + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "y": 1002, + "finished_key": null, + "operand": "@step.value", + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "y": 129, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "rules": [ + { + "category": { + "base": "Success" + }, + "test": { + "status": "success", + "type": "webhook_status" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + }, + { + "category": { + "base": "Failure" + }, + "test": { + "status": "failure", + "type": "webhook_status" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "cb902a40-780f-4e9b-a31e-e7d1021d05ed", + "destination_type": null + } + ], + "ruleset_type": "webhook", + "label": "Color Webhook", + "y": 229, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": { + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "webhook_action": "POST" + } + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "operand": "@step.value", + "finished_key": null, + "y": 587, + "x": 112, + "config": {} + } + ], + "metadata": { + "uuid": null, + "notes": [], + "expires": 720, + "name": "Favorites", + "saved_on": null, + "revision": 1 + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v4.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v4.json new file mode 100644 index 00000000000..4463ede5a9d --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v4.json @@ -0,0 +1,352 @@ +{ + "version": 4, + "flows": [ + { + "definition": { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "name": "< 25", + "id": 15572 + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "name": "> 100", + "id": 15573 + } + ] + } + ] + }, + { + "y": 237, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 535, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 265, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 805, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "rule_sets": [ + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": null, + "label": "Color", + "finished_key": null, + "response_type": "C", + "y": 129, + "x": 98, + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "webhook_action": "POST", + "operand": "@extra.value", + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": null, + "label": "Beer", + "operand": "@step.value|lower_case", + "finished_key": null, + "response_type": "C", + "y": 387, + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + 
"test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": null, + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "response_type": "C", + "y": 702, + "x": 191, + "config": {} + } + ], + "metadata": { + "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", + "notes": [], + "expires": 720, + "name": "Favorites", + "revision": 1, + "saved_on": "2017-08-16T23:10:18.579169Z" + } + }, + "version": 4, + "flow_type": "F", + "name": "Favorites", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v5.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v5.json new file mode 100644 index 00000000000..cb95631ce7d --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v5.json @@ -0,0 +1,421 @@ +{ + "version": 5, + "flows": [ + { + "definition": { + "base_language": "base", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "operand": "@extra.value", + "finished_key": null, + "y": 329, + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + 
"destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "expression", + "label": "Beer", + "y": 687, + "finished_key": null, + "operand": "@step.value|lower_case", + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "y": 1002, + "finished_key": null, + "operand": "@step.value", + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "y": 129, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "ruleset_type": "webhook", + "label": "Color Webhook", + "y": 229, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "operand": "@step.value", + "finished_key": null, + "y": 587, + "x": 112, + "config": {} + } + ], + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "name": "< 25", + "id": 15572 + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "name": "> 100", + "id": 15573 + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "metadata": { + "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", + "notes": [], + "expires": 720, + "name": "Favorites", + "saved_on": "2017-08-16T23:10:18.579169Z", + "revision": 1 + } + }, + "version": 5, + "flow_type": "F", + "name": "Favorites", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v6.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v6.json new file mode 100644 index 00000000000..9025e045606 --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v6.json @@ -0,0 +1,422 @@ +{ + "version": 6, + "flows": [ + { + "definition": { + "base_language": "base", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "y": 329, + "finished_key": null, + "operand": "@extra.value", + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": 
"92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "expression", + "label": "Beer", + "operand": "@step.value|lower_case", + "finished_key": null, + "y": 687, + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "y": 1002, + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "operand": "@step.value", + "finished_key": null, + "y": 129, + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "ruleset_type": "webhook", + "label": "Color Webhook", + "operand": "@step.value", + "finished_key": null, + "y": 229, + "x": 98, + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "y": 587, + "finished_key": null, + "operand": "@step.value", + "x": 112, + "config": {} + } + ], + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "name": "< 25", + "id": 15572 + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "name": "> 100", + "id": 15573 + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", + "metadata": { + "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", + "notes": [], + "expires": 720, + "name": "Favorites", + "saved_on": "2017-08-16T23:10:18.579169Z", + "revision": 1 + } + }, + "version": 6, + "flow_type": "F", + "name": "Favorites", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v7.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v7.json new file mode 100644 index 00000000000..14407c17a5c --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v7.json @@ -0,0 +1,418 @@ +{ + "version": 7, + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "name": "< 25", + "id": 15572 + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "name": "> 100", + "id": 15573 + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "flow_type": "F", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "operand": "@extra.value", + "finished_key": null, + "y": 329, + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + 
"test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "expression", + "label": "Beer", + "y": 687, + "finished_key": null, + "operand": "@step.value|lower_case", + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "y": 1002, + "finished_key": null, + "operand": "@step.value", + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "y": 129, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "ruleset_type": "webhook", + "label": "Color Webhook", + "y": 229, + "finished_key": null, + "operand": "@step.value", + "x": 98, + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "operand": "@step.value", + "finished_key": null, + "y": 587, + "x": 112, + "config": {} + } + ], + "metadata": { + "uuid": null, + "notes": [], + "expires": 720, + "name": "Favorites", + "saved_on": null, + "id": null, + "revision": 1 + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v8.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v8.json new file mode 100644 index 00000000000..0d512ae8ef2 --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v8.json @@ -0,0 +1,418 @@ +{ + "version": 8, + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": 
"0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "name": "< 25", + "id": 15572 + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "name": "> 100", + "id": 15573 + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "flow_type": "F", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "y": 329, + "finished_key": null, + "operand": "@extra.value", + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "expression", + "label": "Beer", + "operand": "@(LOWER(step.value))", + "finished_key": null, + "y": 687, + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, 
+ "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "y": 1002, + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "operand": "@step.value", + "finished_key": null, + "y": 129, + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "ruleset_type": "webhook", + "label": "Color Webhook", + "operand": "@step.value", + "finished_key": null, + "y": 229, + "x": 98, + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "y": 587, + "finished_key": null, + "operand": "@step.value", + "x": 112, + "config": {} + } + ], + "metadata": { + "uuid": null, + "notes": [], + "expires": 720, + "name": "Favorites", + "revision": 1, + "id": null, + "saved_on": null + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_bad_group_name_v9.json b/media/test_flows/legacy/migrations/favorites_bad_group_name_v9.json new file mode 100644 index 00000000000..ee7c978446d --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_bad_group_name_v9.json @@ -0,0 +1,417 @@ +{ + "version": 9, + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + }, + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "groups": [ + { + "uuid": "0fdffdb4-3ca4-4d35-b6a7-129b0dfc7d39", + "name": "< 25" + } + ] + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "groups": [ + { + "uuid": "e5e7bfaf-7c35-4590-8039-c33da2b98d8c", + "name": "> 100" + } + ] + } + ] + }, + { + "y": 437, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 835, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 465, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 1105, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "flow_type": "F", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd", + "rule_sets": [ + { + "uuid": "c564c56f-0341-471e-8bb1-e303090fea6a", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "expression", + "label": "Color", + "y": 329, + "finished_key": null, + "operand": "@extra.value", + "x": 98, + "config": {} + }, + { + "uuid": "8b941374-1b65-4154-afa3-27b871f7be6b", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": 
"d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "expression", + "label": "Beer", + "operand": "@(LOWER(step.value))", + "finished_key": null, + "y": 687, + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "y": 1002, + "x": 191, + "config": {} + }, + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": "wait_message", + "label": "Color Response", + "operand": "@step.value", + "finished_key": null, + "y": 129, + "x": 98, + "config": {} + }, + { + "uuid": "c7dae7c5-129d-44f7-8c05-c32a0efc6058", + "webhook_action": "POST", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "c564c56f-0341-471e-8bb1-e303090fea6a", + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "webhook": "http://localhost:49999/echo?content=%7B%20%22status%22%3A%20%22valid%22%20%7D", + "ruleset_type": "webhook", + "label": "Color Webhook", + "operand": "@step.value", + "finished_key": null, + "y": 229, + "x": 98, + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "All Responses" + }, + "destination": "8b941374-1b65-4154-afa3-27b871f7be6b", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": "wait_message", + "label": "Beer Response", + "y": 587, + "finished_key": null, + "operand": "@step.value", + "x": 112, + "config": {} + } + ], + "metadata": { + "uuid": null, + "notes": [], + "expires": 720, + "name": "Favorites", + "revision": 1, + "saved_on": null + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/favorites_v4.json b/media/test_flows/legacy/migrations/favorites_v4.json new 
file mode 100644 index 00000000000..eb1e13ec4f0 --- /dev/null +++ b/media/test_flows/legacy/migrations/favorites_v4.json @@ -0,0 +1,332 @@ +{ + "version": 4, + "flows": [ + { + "definition": { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "msg": { + "base": "What is your favorite color?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "98388930-7a0f-4eb8-9a0a-09be2f006420" + } + ] + }, + { + "y": 237, + "x": 131, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "actions": [ + { + "msg": { + "base": "Good choice, I like @flow.color.category too! What is your favorite beer?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "cf57f270-c9d7-4826-b3cc-7bfc22ac4ef6" + } + ] + }, + { + "y": 8, + "x": 456, + "destination": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "uuid": "37f62180-025e-4360-a72b-59af7ac6d1ab", + "actions": [ + { + "msg": { + "base": "I don't know that color. Try again." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "d6aee40b-3710-4358-b0a6-c0ddc1d7734e" + } + ] + }, + { + "y": 535, + "x": 191, + "destination": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "uuid": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "actions": [ + { + "msg": { + "base": "Mmmmm... delicious @flow.beer.category. If only they made @(LOWER(flow.color)) @flow.beer.category! Lastly, what is your name?" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "ca798d2d-2c95-468a-a857-74797a4d5301" + } + ] + }, + { + "y": 265, + "x": 512, + "destination": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "uuid": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "actions": [ + { + "msg": { + "base": "I don't know that one, try again please." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "be5c0c50-b3a4-486f-9e2e-335bdb542385" + } + ] + }, + { + "y": 805, + "x": 191, + "destination": null, + "uuid": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "actions": [ + { + "msg": { + "base": "Thanks @flow.name, we are all done!" 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "512aa0ca-0c57-4b99-a7ad-e67d290e0c2c" + } + ] + } + ], + "rule_sets": [ + { + "uuid": "0ecf7914-05e0-4b71-8816-495d2c0921b5", + "rules": [ + { + "test": { + "test": { + "base": "Red" + }, + "type": "contains_any" + }, + "category": { + "base": "Red" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "663f667d-561a-4920-9375-3ce367615bdc", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Green" + }, + "type": "contains_any" + }, + "category": { + "base": "Green" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "8977bc24-d10c-4b1a-9b07-13e3447165d1", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Blue" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "56e47151-0a7d-4dd8-89cf-35fdcb5288ef", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Navy" + }, + "type": "contains_any" + }, + "category": { + "base": "Blue" + }, + "destination": "00c0ebde-1d4f-4bf5-b5db-22f72b2551b7", + "uuid": "08403c82-043d-4744-8e1a-c863e5e92fb7", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Cyan" + }, + "type": "contains_any" + }, + "category": { + "base": "Cyan" + }, + "destination": null, + "uuid": "cc43e621-c759-4976-8088-e89a0bce7749", + "destination_type": null + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": null, + "uuid": "955bd46a-29b2-49eb-bc70-0fc573d1cceb", + "destination_type": null + } + ], + "ruleset_type": null, + "label": "Color", + "finished_key": null, + "response_type": "C", + "y": 129, + "x": 98, + "webhook": "http://localhost:49999/status", + "webhook_action": "POST", + "operand": "@extra.value", + "config": {} + }, + { + "uuid": "58ec23b9-70bb-4d70-a739-8cee2d2f1e75", + "rules": [ + { + "test": { + "test": { + "base": "Mutzig" + }, + "type": "contains_any" + }, + "category": { + "base": "Mutzig" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "d9c89b0e-2083-42e4-93c9-4d75e5f6c86f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Primus" + }, + "type": "contains_any" + }, + "category": { + "base": "Primus" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "777a04d2-aa27-4024-9b15-99f699a65a2f", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Turbo King" + }, + "type": "contains_any" + }, + "category": { + "base": "Turbo King" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "ad519b79-9738-449d-80a1-e8fc3aebd08e", + "destination_type": "A" + }, + { + "test": { + "test": { + "base": "Skol" + }, + "type": "contains_any" + }, + "category": { + "base": "Skol" + }, + "destination": "92a6a4c6-c976-405a-97c8-76bf7edd214a", + "uuid": "c27f16dc-519c-44a9-bee7-fbfe76ade983", + "destination_type": "A" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "base": "Other" + }, + "destination": "7c3c0319-20ee-4c30-a276-55dba0d049de", + "uuid": "fbccf8e5-b167-43f1-ace3-72802ba6db92", + "destination_type": "A" + } + ], + "ruleset_type": null, + "label": "Beer", + "operand": "@step.value|lower_case", + "finished_key": null, + "response_type": "C", + "y": 387, + "x": 112, + "config": {} + }, + { + "uuid": "c85670d3-e550-40f7-9ce2-e22c5d3fbcea", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + 
"category": { + "base": "All Responses" + }, + "destination": "fbb7df74-627a-45bb-83f6-e3e4d2d8020c", + "uuid": "cf3633cc-d2e4-4f25-b318-a2ddc61b6849", + "destination_type": "A" + } + ], + "ruleset_type": null, + "label": "Name", + "operand": "@step.value", + "finished_key": null, + "response_type": "C", + "y": 702, + "x": 191, + "config": {} + } + ], + "metadata": { + "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", + "notes": [], + "expires": 720, + "name": "Favorites", + "revision": 1, + "saved_on": "2017-08-16T23:10:18.579169Z" + } + }, + "version": 4, + "flow_type": "F", + "name": "Favorites", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/ivr_v3.json b/media/test_flows/legacy/migrations/ivr_v3.json new file mode 100644 index 00000000000..97a282b2b30 --- /dev/null +++ b/media/test_flows/legacy/migrations/ivr_v3.json @@ -0,0 +1,161 @@ +{ + "campaigns": [], + "version": 3, + "site": "http://rapidpro.io", + "flows": [ + { + "definition": { + "entry": "d1dd7b53-dafe-493f-a267-62301e76ee85", + "rule_sets": [ + { + "uuid": "c939d4cf-0294-4478-952b-a630ba972ba1", + "webhook_action": null, + "response_type": "C", + "rules": [ + { + "test": { + "test": "1", + "type": "eq" + }, + "category": "Yes", + "destination": "92367194-924b-4c47-9250-e47363855e32", + "uuid": "4cdf62ea-5cba-4261-992c-246c34667dc3" + }, + { + "test": { + "test": "2", + "type": "eq" + }, + "category": "No", + "destination": "866e80ae-128e-4e49-98b9-51317ec847e3", + "uuid": "a9b6086e-a423-4790-a342-df2c9972fc8c" + }, + { + "test": { + "test": "3", + "type": "eq" + }, + "category": "Maybe", + "destination": "096de08e-b260-4025-a2fd-f61996a3f4eb", + "uuid": "a4e661de-9ec1-424d-a383-362a456925e0" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": "Other", + "destination": "13549d38-f341-4ad5-ad44-1e4e5cedd032", + "uuid": "f8d4e9b0-846a-4508-a3d7-2f910fa04fc0" + } + ], + "webhook": null, + "label": "Call Me", + "operand": "@step.value", + "finished_key": null, + "y": 165, + "x": 204 + } + ], + "action_sets": [ + { + "y": 91, + "x": 655, + "destination": "c939d4cf-0294-4478-952b-a630ba972ba1", + "uuid": "13549d38-f341-4ad5-ad44-1e4e5cedd032", + "actions": [ + { + "recording": null, + "msg": "Press one, two, or three. Thanks.", + "type": "say", + "uuid": "6d8d0bd4-7b72-4a91-ad78-2ac3a5220637" + } + ] + }, + { + "y": 294, + "x": 531, + "destination": null, + "uuid": "096de08e-b260-4025-a2fd-f61996a3f4eb", + "actions": [ + { + "recording": null, + "msg": "This might be crazy.", + "type": "say", + "uuid": "80cd8158-6e2a-4adb-8ddc-f9b5b036a7ad" + } + ] + }, + { + "y": 294, + "x": 310, + "destination": null, + "uuid": "866e80ae-128e-4e49-98b9-51317ec847e3", + "actions": [ + { + "recording": null, + "msg": "Fine, this is the last time we shall speak.", + "type": "say", + "uuid": "14849fb6-3a7d-41c8-9595-9e97cf17f9dd" + } + ] + }, + { + "y": 291, + "x": 91, + "destination": null, + "uuid": "92367194-924b-4c47-9250-e47363855e32", + "actions": [ + { + "recording": null, + "msg": "Great, I can't wait to give you a call later.", + "type": "say", + "uuid": "cc6c5044-ec52-4861-ba66-b2ee741b668c" + } + ] + }, + { + "y": 0, + "x": 101, + "destination": "c939d4cf-0294-4478-952b-a630ba972ba1", + "uuid": "d1dd7b53-dafe-493f-a267-62301e76ee85", + "actions": [ + { + "recording": null, + "msg": "Would you like me to call you? 
Press one for yes, two for no, or three for maybe.", + "type": "say", + "uuid": "03290af7-4748-46e7-ac8d-1967375de33a" + } + ] + } + ], + "metadata": { + "notes": [] + } + }, + "id": 100, + "flow_type": "V", + "name": "Call me maybe" + } + ], + "triggers": [ + { + "flow": { + "name": "Call me maybe", + "id": 100 + }, + "groups": [], + "keyword": "callme", + "trigger_type": "K" + }, + { + "flow": { + "name": "Call me maybe", + "id": 100 + }, + "groups": [], + "keyword": null, + "trigger_type": "V" + } + ] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/malformed_groups.json b/media/test_flows/legacy/migrations/malformed_groups.json new file mode 100644 index 00000000000..850c4f1a303 --- /dev/null +++ b/media/test_flows/legacy/migrations/malformed_groups.json @@ -0,0 +1,49 @@ +{ + "version": 4, + "flows": [ + { + "definition": { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": null, + "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", + "actions": [ + { + "type": "add_group", + "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", + "group": { + "name": "< 25", + "id": 15572 + } + }, + { + "type": "del_group", + "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", + "group": { + "id": 15573 + } + } + ] + } + ], + "rule_sets": [], + "metadata": { + "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", + "notes": [], + "expires": 720, + "name": "Bad Mojo", + "revision": 1, + "saved_on": "2017-08-16T23:10:18.579169Z" + } + }, + "version": 4, + "flow_type": "F", + "name": "Bad Mojo", + "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/malformed_single_message.json b/media/test_flows/legacy/migrations/malformed_single_message.json new file mode 100644 index 00000000000..4a11d0d3979 --- /dev/null +++ b/media/test_flows/legacy/migrations/malformed_single_message.json @@ -0,0 +1,31 @@ +{ + "campaigns": [], + "triggers": [], + "version": 3, + "site": "http://rapidpro.io", + "flows": [ + { + "name": "Single Message Flow", + "id": -1, + "uuid": "f467561a-3b95-4a4a-94bc-97bc6b4268c0", + "definition": { + "entry": "2d702ba6-461e-442c-96bc-2b8a87c9ceca", + "action_sets": [ + { + "x": 0, + "y": 0, + "uuid": "2d702ba6-461e-442c-96bc-2b8a87c9ceca", + "destination": null, + "actions": [ + { + "msg": "Single message text", + "type": "reply" + } + ] + } + ], + "rulesets": [] + } + } + ] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_0.json b/media/test_flows/legacy/migrations/migrate_to_11_0.json new file mode 100644 index 00000000000..34355cc297d --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_0.json @@ -0,0 +1,42 @@ +{ + "version": "10.4", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "d96947d0-f975-47ee-be7d-3dfe68a52703", + "action_sets": [ + { + "uuid": "d96947d0-f975-47ee-be7d-3dfe68a52703", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "msg": { + "base": { + "base": "@date Something went wrong once. I shouldn't be a dict inside a dict." 
+ } + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "1ee58c31-3504-49d3-914b-324d484fed1d" + } + ], + "exit_uuid": "f2566f59-5d36-4de7-8581-dcc5de7e8340" + } + ], + "rule_sets": [], + "base_language": "base", + "flow_type": "M", + "version": "10.4", + "metadata": { + "name": "Migrate to 11.0", + "saved_on": "2017-11-15T22:56:36.039558Z", + "revision": 5, + "uuid": "5a8deb77-23b8-46ee-a775-48ed32742e31", + "expires": 720 + } + } + ] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_10.json b/media/test_flows/legacy/migrations/migrate_to_11_10.json new file mode 100644 index 00000000000..92e6896057d --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_10.json @@ -0,0 +1,239 @@ +{ + "version": "11.9", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "bd6ca3fc-0505-4ea6-a1c6-60d0296a7db0", + "action_sets": [ + { + "uuid": "bd6ca3fc-0505-4ea6-a1c6-60d0296a7db0", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "say", + "uuid": "0738e369-279d-4e2f-a14c-08714b0d6f74", + "msg": { + "eng": "Hi there this is an IVR flow.. how did you get here?" + }, + "recording": null + } + ], + "exit_uuid": "0e78ff3d-8307-4c0e-a3b0-af4019930835" + } + ], + "rule_sets": [], + "base_language": "eng", + "flow_type": "V", + "version": "11.9", + "metadata": { + "name": "Migrate to 11.10 IVR Child", + "saved_on": "2019-01-25T21:14:37.475679Z", + "revision": 2, + "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5", + "expires": 5, + "ivr_retry": 60, + "ivr_retry_failed_events": false + } + }, + { + "entry": "920ce708-31d3-4870-804f-190fb37b9b8c", + "action_sets": [ + { + "uuid": "920ce708-31d3-4870-804f-190fb37b9b8c", + "x": 59, + "y": 0, + "destination": "90363d00-a669-4d84-ab57-eb27bf9c3284", + "actions": [ + { + "type": "reply", + "uuid": "3071cb5d-4caf-4a15-87c7-daae4a436ee7", + "msg": { + "eng": "hi" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "f646245c-ac46-4565-9215-cef53c34da09" + }, + { + "uuid": "bbd1c25f-ab01-4539-8f3e-b0ca18f366f4", + "x": 48, + "y": 345, + "destination": null, + "actions": [ + { + "type": "flow", + "uuid": "edb70527-47fa-463e-8318-359254b1bc0e", + "flow": { + "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5", + "name": "Migrate to 11.10 IVR Child" + } + } + ], + "exit_uuid": "330f0f9a-154b-49de-9ff9-a7891d4a11af" + }, + { + "uuid": "62e29de4-d85e-459d-ad38-220d1048b714", + "x": 412, + "y": 348, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "41ed5ba3-41c7-4e6f-b394-d451204bcf0f", + "msg": { + "eng": "Expired" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "0040f402-a6ac-4de4-8775-a4938b9011b8" + } + ], + "rule_sets": [ + { + "uuid": "90363d00-a669-4d84-ab57-eb27bf9c3284", + "x": 218, + "y": 82, + "label": "Response 1", + "rules": [ + { + "uuid": "4c6ac0ad-e8a8-4b1e-b958-ef2f22728821", + "category": { + "eng": "Completed" + }, + "destination": "e5dae061-2c94-45ae-a3bb-4822989e636a", + "destination_type": "R", + "test": { + "type": "subflow", + "exit_type": "completed" + }, + "label": null + }, + { + "uuid": "288dfab6-5171-4cf0-92af-e73af44dbeee", + "category": { + "eng": "Expired" + }, + "destination": "e5dae061-2c94-45ae-a3bb-4822989e636a", + "destination_type": "R", + "test": { + "type": "subflow", + "exit_type": "expired" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "subflow", + "response_type": "", + "operand": "@step.value", + "config": { + 
"flow": { + "name": "Migrate to 11.10 SMS Child", + "uuid": "a492288a-7b26-4507-b8db-173d28b83ad0" + } + } + }, + { + "uuid": "e5dae061-2c94-45ae-a3bb-4822989e636a", + "x": 218, + "y": 228, + "label": "Response 2", + "rules": [ + { + "uuid": "b9f763d2-82d7-4334-8ed8-806b803d32c1", + "category": { + "eng": "Completed" + }, + "destination": "bbd1c25f-ab01-4539-8f3e-b0ca18f366f4", + "destination_type": "A", + "test": { + "type": "subflow", + "exit_type": "completed" + }, + "label": null + }, + { + "uuid": "54b51a30-8c52-49aa-afc1-24d827a17a8d", + "category": { + "eng": "Expired" + }, + "destination": "62e29de4-d85e-459d-ad38-220d1048b714", + "destination_type": "A", + "test": { + "type": "subflow", + "exit_type": "expired" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "subflow", + "response_type": "", + "operand": "@step.value", + "config": { + "flow": { + "name": "Migrate to 11.10 IVR Child", + "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5" + } + } + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.9", + "metadata": { + "name": "Migrate to 11.10 Parent", + "saved_on": "2019-01-28T19:51:28.310305Z", + "revision": 52, + "uuid": "880cea73-fab6-4353-9db2-bf2e16067941", + "expires": 10080 + } + }, + { + "entry": "762fb8ad-1ec5-4246-a577-e08f0fe497e5", + "action_sets": [ + { + "uuid": "762fb8ad-1ec5-4246-a577-e08f0fe497e5", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "69a7f227-5f44-4ddc-80e1-b9dd855868eb", + "msg": { + "eng": "I'm just a regular honest messaging flow" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "8ec7a5ed-675c-4102-b211-ea10258ac5f9" + } + ], + "rule_sets": [], + "base_language": "eng", + "flow_type": "M", + "version": "11.9", + "metadata": { + "name": "Migrate to 11.10 SMS Child", + "saved_on": "2019-01-28T19:03:29.579743Z", + "revision": 2, + "uuid": "a492288a-7b26-4507-b8db-173d28b83ad0", + "expires": 10080, + "ivr_retry_failed_events": null + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_11.json b/media/test_flows/legacy/migrations/migrate_to_11_11.json new file mode 100644 index 00000000000..f9ca3c4ca58 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_11.json @@ -0,0 +1,107 @@ +{ + "version": "11.10", + "site": "https://textit.in", + "flows": [ + { + "entry": "22505d46-43c5-42ba-975e-725c01ea440f", + "action_sets": [ + { + "uuid": "22505d46-43c5-42ba-975e-725c01ea440f", + "x": 100, + "y": 0, + "destination": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", + "actions": [ + { + "type": "reply", + "uuid": "27dfd8ac-55c5-49c9-88e3-3fb84a9894ff", + "msg": { + "eng": "Hey" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "6e2b09ec-3cc0-4ee6-ae7b-b76bad3ab6d3" + }, + { + "uuid": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", + "x": 95, + "y": 101, + "destination": "78c20ee4-94bd-45e6-8510-8e602568fb6e", + "actions": [ + { + "type": "add_label", + "uuid": "bc82c11d-7654-44e4-966c-fb39e2851df0", + "labels": [ + { + "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", + "name": "Hello" + } + ] + } + ], + "exit_uuid": "84bf44a1-13fd-44cb-8014-d6feb06e010f" + }, + { + "uuid": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", + "x": 146, + "y": 358, + "destination": null, + "actions": [ + { + "type": "add_label", + "uuid": "910bf3b5-951f-47a8-93df-11a6eac8bf0f", + "labels": [ + { + "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", + 
"name": "Hello" + } + ] + } + ], + "exit_uuid": "6d579c28-9f3f-4584-bd2e-74009612fdbb" + } + ], + "rule_sets": [ + { + "uuid": "78c20ee4-94bd-45e6-8510-8e602568fb6e", + "x": 85, + "y": 219, + "label": "Response 1", + "rules": [ + { + "uuid": "33438bbf-49bd-4468-9a74-bbd7e1f58f57", + "category": { + "eng": "All Responses" + }, + "destination": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "", + "operand": "@step.value", + "config": {} + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.10", + "metadata": { + "name": "Add Label", + "saved_on": "2019-02-12T09:23:05.746930Z", + "revision": 7, + "uuid": "e9b5b8ba-43f4-4bc2-a790-811ee1cfe392", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_12.json b/media/test_flows/legacy/migrations/migrate_to_11_12.json new file mode 100644 index 00000000000..c4aec731cad --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_12.json @@ -0,0 +1,197 @@ +{ + "version": "11.12", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "456b7f83-a96b-4f17-aa0a-116a30ee0d52", + "action_sets": [ + { + "uuid": "456b7f83-a96b-4f17-aa0a-116a30ee0d52", + "x": 100, + "y": 0, + "destination": "cfea15b5-3761-41d0-ad3e-33df7a9b835a", + "actions": [ + { + "type": "channel", + "uuid": "338300e8-b433-4372-8a12-87a0f543ee8a", + "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", + "name": "Android: 1234" + } + ], + "exit_uuid": "6fb525e7-bc24-4358-acde-f2d712b28f2b" + }, + { + "uuid": "cfea15b5-3761-41d0-ad3e-33df7a9b835a", + "x": 114, + "y": 156, + "destination": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", + "actions": [ + { + "type": "reply", + "uuid": "bbdd28f0-824f-41b4-af25-5d6f9a4afefb", + "msg": { + "base": "Hey there, Yes or No?" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "902db0bc-f6a7-45d2-93b2-f47f3af1261e" + }, + { + "uuid": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", + "x": 181, + "y": 452, + "destination": "85d88c16-fafe-4b8e-8e58-a6dc6e1e0e77", + "actions": [ + { + "type": "channel", + "uuid": "437d71a2-bb17-4e71-bef7-ad6b58f0eb85", + "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", + "name": "Android: 1234" + } + ], + "exit_uuid": "cec84721-7f8f-43c3-9af2-4d5d6a15f9de" + }, + { + "uuid": "76e091fe-62a5-4786-9465-7c1fb2446694", + "x": 460, + "y": 117, + "destination": "ef9afd2d-d106-4168-a104-20ddc14f9444", + "actions": [ + { + "type": "reply", + "uuid": "f7d12748-440e-4ef1-97d4-8a9efddf4454", + "msg": { + "base": "Yo, What? 
Repeat Yes or No" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "f5ce0ce5-8023-4b8d-b635-762a2c18726f" + }, + { + "uuid": "9eef8677-8598-4e87-9e21-3ad245d87aee", + "x": 193, + "y": 633, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "1d3ec932-6b6f-45c2-b4d6-9a0e07721686", + "msg": { + "base": "Bye" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "839dd7c4-64b9-428f-b1d0-c386f493fc4f" + }, + { + "uuid": "85d88c16-fafe-4b8e-8e58-a6dc6e1e0e77", + "x": 173, + "y": 550, + "destination": "9eef8677-8598-4e87-9e21-3ad245d87aee", + "actions": [ + { + "type": "channel", + "uuid": "0afa546d-8308-41c2-a70c-979846108bec", + "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", + "name": "Android: 1234" + } + ], + "exit_uuid": "835a5ca9-d518-452f-865c-ca8e5cde4777" + }, + { + "uuid": "ef9afd2d-d106-4168-a104-20ddc14f9444", + "x": 501, + "y": 242, + "destination": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", + "actions": [ + { + "type": "channel", + "uuid": "28d63382-40ea-4741-ba3a-2930348fab0e", + "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", + "name": "Android: 1234" + } + ], + "exit_uuid": "be8ca9a5-0f61-4c9d-93e4-02aa6bb27afc" + } + ], + "rule_sets": [ + { + "uuid": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", + "x": 134, + "y": 315, + "label": "Response 1", + "rules": [ + { + "uuid": "2924a1d0-be47-4f8e-aefb-f7ff3a563a43", + "category": { + "base": "Yes" + }, + "destination": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "base": "Yes" + } + }, + "label": null + }, + { + "uuid": "0107f9e4-b46c-40d7-b25b-058cac3a167e", + "category": { + "base": "No" + }, + "destination": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "base": "No" + } + }, + "label": null + }, + { + "uuid": "ad81cc6d-1973-4eed-b97d-6edd9ebdeedc", + "category": { + "base": "Other" + }, + "destination": "76e091fe-62a5-4786-9465-7c1fb2446694", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "", + "operand": "@step.value", + "config": {} + } + ], + "base_language": "base", + "flow_type": "M", + "version": "11.12", + "metadata": { + "name": "channels", + "saved_on": "2019-02-26T21:16:32.055957Z", + "revision": 24, + "uuid": "e5fdf453-428f-4da1-9703-0decdf7cf6f9", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_12_one_node.json b/media/test_flows/legacy/migrations/migrate_to_11_12_one_node.json new file mode 100644 index 00000000000..3f4585c1277 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_12_one_node.json @@ -0,0 +1,38 @@ +{ + "version": "11.11", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "b0b6559d-e5bd-4deb-a4ab-9e5f04001dd4", + "action_sets": [ + { + "uuid": "b0b6559d-e5bd-4deb-a4ab-9e5f04001dd4", + "x": 100, + "y": 0, + "actions": [ + { + "type": "channel", + "uuid": "4b34b85d-da31-40c9-af65-6d76ca54b1b5", + "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", + "name": "Android: 1234" + } + ], + "exit_uuid": "be37f250-f992-45e0-97fd-a3c0f57584dc" + } + ], + "rule_sets": [], + "base_language": "base", + "flow_type": "M", + "version": "11.11", + "metadata": { + "name": "channel", + "saved_on": "2019-02-28T08:55:17.275670Z", + 
"revision": 2, + "uuid": "8a8612bc-ff3a-45ea-b7a5-2673ce901cd9", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_12_other_org.json b/media/test_flows/legacy/migrations/migrate_to_11_12_other_org.json new file mode 100644 index 00000000000..14b2cdbd297 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_12_other_org.json @@ -0,0 +1,39 @@ +{ + "version": "11.11", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "a1c00b3e-a904-4085-851d-e5e386d728b8", + "action_sets": [ + { + "uuid": "a1c00b3e-a904-4085-851d-e5e386d728b8", + "x": 124, + "y": 16, + "actions": [ + { + "type": "channel", + "channel": "CHANNEL-UUID", + "uuid": "84889e4d-e7e8-4415-9ad9-db27d9972558", + "name": "Not Ours" + } + ], + "exit_uuid": "eada09b7-7136-4f24-a34f-62ca7b404423" + } + ], + "rule_sets": [], + "base_language": "eng", + "flow_type": "M", + "version": "11.11", + "metadata": { + "name": "Other Org Channel", + "saved_on": "2019-02-25T20:36:14.155001Z", + "revision": 19, + "uuid": "bb8ca54b-7dcb-431f-bd86-ec3082b63469", + "expires": 43200, + "ivr_retry_failed_events": null, + "notes": [] + }, + "type": "M" + } + ] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_3.json b/media/test_flows/legacy/migrations/migrate_to_11_3.json new file mode 100644 index 00000000000..d8271973ddc --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_3.json @@ -0,0 +1,84 @@ +{ + "version": "11.2", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", + "action_sets": [ + { + "uuid": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", + "x": 412, + "y": 814, + "destination": null, + "actions": [ + { + "type": "api", + "uuid": "9b46779a-f680-450f-8f3c-005f3b7efccd", + "webhook": "http://example.com/?thing=@flow.response_1&foo=bar", + "action": "POST", + "webhook_headers": [] + } + ], + "exit_uuid": "25d8d2ae-ea82-4214-9561-42e0bf420a93" + } + ], + "rule_sets": [ + { + "uuid": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", + "x": 100, + "y": 0, + "label": "Response 1", + "rules": [ + { + "uuid": "c799def9-345b-46f9-a838-a59191cdb181", + "category": { + "eng": "Success" + }, + "destination": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", + "destination_type": "R", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "1ace9344-3053-4dc2-aced-9a6e3c8a6e9d", + "category": { + "eng": "Failure" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "webhook", + "response_type": "", + "operand": "@step.value", + "config": { + "webhook": "http://example.com/webhook1", + "webhook_action": "POST", + "webhook_headers": [] + } + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.2", + "metadata": { + "name": "Migrate to 11.3 Test", + "saved_on": "2018-09-25T14:57:23.429081Z", + "revision": 97, + "uuid": "915144c5-605e-46f3-afa3-53aae2c9b8ee", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_4.json b/media/test_flows/legacy/migrations/migrate_to_11_4.json new file mode 100644 index 00000000000..6ca69cddacc --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_4.json @@ -0,0 +1,168 @@ +{ + "version": 
"11.3", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "019d0fab-eb51-4431-9f51-ddf207d0a744", + "action_sets": [ + { + "uuid": "92fb739f-4a99-4e29-8078-1f8fb06d127e", + "x": 241, + "y": 425, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "0382e5aa-bfda-42c8-84d3-7893aba002f8", + "msg": { + "eng": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "send", + "uuid": "b5860896-db39-4ebb-b842-d38edf46fb61", + "msg": { + "eng": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" + }, + "contacts": [ + { + "id": 277738, + "name": "05fe51bf5a434b9", + "uuid": "74eed75b-dd4f-4d24-9fc5-474052dbc086", + "urns": [ + { + "scheme": "tel", + "path": "+2353265262", + "priority": 90 + } + ] + } + ], + "groups": [], + "variables": [], + "media": {} + }, + { + "type": "email", + "uuid": "c9130ab6-d2b2-419c-8109-65b5afc47039", + "emails": [ + "test@test.com" + ], + "subject": "Testing", + "msg": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" + } + ], + "exit_uuid": "ea5640be-105b-4277-b04e-7ad55d2c898e" + } + ], + "rule_sets": [ + { + "uuid": "019d0fab-eb51-4431-9f51-ddf207d0a744", + "x": 226, + "y": 118, + "label": "Response 1", + "rules": [ + { + "uuid": "7fd3aae5-66ca-4d8d-9923-3ef4424e7658", + "category": { + "eng": "All Responses" + }, + "destination": "fc1b062c-52c0-4c9e-87bd-1f9437d513bf", + "destination_type": "R", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "fc1b062c-52c0-4c9e-87bd-1f9437d513bf", + "x": 226, + "y": 232, + "label": "Response 2", + "rules": [ + { + "uuid": "58a4e6f6-fe44-4ac9-bf98-edffd6dfad04", + "category": { + "eng": "All Responses" + }, + "destination": "518b6f12-0192-4a75-8900-43a5dea02340", + "destination_type": "R", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "expression", + "response_type": "", + "operand": "@contact.uuid", + "config": {} + }, + { + "uuid": "518b6f12-0192-4a75-8900-43a5dea02340", + "x": 226, + "y": 335, + "label": "Response 3", + "rules": [ + { + "uuid": "0d1b5fd9-bfee-4df6-9837-9883787f0661", + "category": { + "eng": "Bucket 1" + }, + "destination": "92fb739f-4a99-4e29-8078-1f8fb06d127e", + "destination_type": "A", + "test": { + "type": "between", + "min": "0", + "max": "0.5" + }, + "label": null + }, + { + "uuid": "561b7ce2-5975-4925-a76a-f4a618b11c8b", + "category": { + "eng": "Bucket 2" + }, + "destination": "92fb739f-4a99-4e29-8078-1f8fb06d127e", + "destination_type": "A", + "test": { + "type": "between", + "min": "0.5", + "max": "1" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "random", + "response_type": "", + "operand": "@(RAND())", + "config": {} + } + ], + "base_language": "eng", + "flow_type": "F", + "version": "11.3", + "metadata": { + "name": "Migrate to 11.4", + "saved_on": "2018-06-25T21:58:04.000768Z", + "revision": 123, + "uuid": "025f1d6e-ec87-4045-8471-0a028b9483aa", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git 
a/media/test_flows/legacy/migrations/migrate_to_11_5.json b/media/test_flows/legacy/migrations/migrate_to_11_5.json new file mode 100644 index 00000000000..86c2ee6af78 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_5.json @@ -0,0 +1,398 @@ +{ + "version": "11.4", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", + "action_sets": [ + { + "uuid": "35707236-5dd6-487d-bea4-6a73822852bf", + "x": 122, + "y": 458, + "destination": "51956031-9f42-475f-9d43-3ab2f87f4dd2", + "actions": [ + { + "type": "reply", + "uuid": "c82df796-9d8f-4e9b-b76c-97027fa74ef7", + "msg": { + "eng": "@flow.response_1\n@flow.response_1.value\n@flow.response_1.category\n@(upper(flow.response_1))\n@(upper(flow.response_1.category))\n\n@flow.response_2\n@flow.response_2.value\n@flow.response_2.category\n@(upper(flow.response_2))\n@(upper(flow.response_2.category))\n\n@flow.response_3\n@flow.response_3.value\n@flow.response_3.category\n@(upper(flow.response_3))\n@(upper(flow.response_3.category))" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "65af1dca-b48e-4b36-867c-2ace47038093" + }, + { + "uuid": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", + "x": 412, + "y": 814, + "destination": null, + "actions": [ + { + "type": "api", + "uuid": "9b46779a-f680-450f-8f3c-005f3b7efccd", + "webhook": "http://example.com/?thing=@flow.response_1&foo=bar", + "action": "GET", + "webhook_headers": [] + }, + { + "type": "save", + "uuid": "e0ecf2a5-0429-45ec-a9d7-e2c122274484", + "label": "Contact Name", + "field": "name", + "value": "@flow.response_3.value" + } + ], + "exit_uuid": "25d8d2ae-ea82-4214-9561-42e0bf420a93" + } + ], + "rule_sets": [ + { + "uuid": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", + "x": 100, + "y": 0, + "label": "Response 1", + "rules": [ + { + "uuid": "c799def9-345b-46f9-a838-a59191cdb181", + "category": { + "eng": "Success" + }, + "destination": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", + "destination_type": "R", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "1ace9344-3053-4dc2-aced-9a6e3c8a6e9d", + "category": { + "eng": "Failure" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "webhook", + "response_type": "", + "operand": "@step.value", + "config": { + "webhook": "http://example.com/webhook1", + "webhook_action": "GET", + "webhook_headers": [] + } + }, + { + "uuid": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", + "x": 103, + "y": 125, + "label": "Response 2", + "rules": [ + { + "uuid": "ce50f51d-f052-4ff1-8a9b-a79faa62dfc2", + "category": { + "eng": "Success" + }, + "destination": "5906c8f3-46f2-4319-8743-44fb26f2b109", + "destination_type": "R", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "338e6c08-3597-4d22-beef-80d27b870a93", + "category": { + "eng": "Failure" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "webhook", + "response_type": "", + "operand": "@step.value", + "config": { + "webhook": "http://example.com/webhook2", + "webhook_action": "GET", + "webhook_headers": [] + } + }, + { + "uuid": "5906c8f3-46f2-4319-8743-44fb26f2b109", + "x": 105, + "y": 243, + "label": "Response 2", + "rules": [ + { + "uuid": 
"6328e346-49c6-4607-a573-e8dc6e60bfcd", + "category": { + "eng": "All Responses" + }, + "destination": "728a9a97-f28e-4fb3-a96a-7a7a8d5e5a4c", + "destination_type": "R", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "expression", + "response_type": "", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "728a9a97-f28e-4fb3-a96a-7a7a8d5e5a4c", + "x": 112, + "y": 346, + "label": "Response 3", + "rules": [ + { + "uuid": "fb64dd04-8dd3-4e28-8607-468d1748a81f", + "category": { + "eng": "Success" + }, + "destination": "35707236-5dd6-487d-bea4-6a73822852bf", + "destination_type": "A", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "992c7429-221a-40f0-80be-fd6fbe858f57", + "category": { + "eng": "Failure" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "resthook", + "response_type": "", + "operand": "@step.value", + "config": { + "resthook": "test-resthook-event" + } + }, + { + "uuid": "51956031-9f42-475f-9d43-3ab2f87f4dd2", + "x": 411, + "y": 513, + "label": "Response 5", + "rules": [ + { + "uuid": "c06fb4fe-09a0-4990-b32e-e233de7edfda", + "category": { + "eng": "All Responses" + }, + "destination": "f39a6d73-57d9-4d10-9055-57446addc87a", + "destination_type": "R", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "expression", + "response_type": "", + "operand": "@(flow.response_1 & flow.response_2 & flow.response_3)", + "config": {} + }, + { + "uuid": "f39a6d73-57d9-4d10-9055-57446addc87a", + "x": 414, + "y": 625, + "label": "Response 6", + "rules": [ + { + "uuid": "820f0020-0c72-44cd-9c12-a2b05c13e470", + "category": { + "eng": "Yes" + }, + "destination": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", + "destination_type": "R", + "test": { + "type": "contains_any", + "test": { + "eng": "yes" + } + }, + "label": null + }, + { + "uuid": "8e55e70f-acf0-45a2-b7f9-2f95ccbbfc4d", + "category": { + "eng": "Matching" + }, + "destination": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", + "destination_type": "R", + "test": { + "type": "contains_any", + "test": { + "eng": "@flow.response_1" + } + }, + "label": null + }, + { + "uuid": "d1c61a49-64f5-4ff6-b17f-1f22472f829f", + "category": { + "eng": "Other" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "flow_field", + "response_type": "", + "operand": "@flow.response_1", + "config": {} + }, + { + "uuid": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", + "x": 489, + "y": 722, + "label": "Response 7", + "rules": [ + { + "uuid": "234fff68-780f-442f-a1c6-757131fbc213", + "category": { + "eng": "Success" + }, + "destination": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", + "destination_type": "A", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "70b79516-40a5-439c-9dee-45b242d6bb8b", + "category": { + "eng": "Failure" + }, + "destination": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", + "destination_type": "A", + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "webhook", + "response_type": "", + "operand": "@step.value", + "config": { + "webhook": "http://example.com/?thing=@flow.response_1.value", + "webhook_action": "GET", + 
"webhook_headers": [] + } + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.4", + "metadata": { + "name": "Migrate to 11.5 Test", + "saved_on": "2018-09-25T14:57:23.429081Z", + "revision": 97, + "uuid": "915144c5-605e-46f3-afa3-53aae2c9b8ee", + "expires": 10080, + "notes": [ + { + "x": 357, + "y": 0, + "title": "New Note", + "body": "@flow.response_1" + }, + { + "x": 358, + "y": 117, + "title": "New Note", + "body": "flow.response_2" + }, + { + "x": 358, + "y": 236, + "title": "New Note", + "body": "reuses flow.response_2" + }, + { + "x": 360, + "y": 346, + "title": "New Note", + "body": "@flow.response_3" + }, + { + "x": 671, + "y": 498, + "title": "New Note", + "body": "operand should be migrated too" + }, + { + "x": 717, + "y": 608, + "title": "New Note", + "body": "rule test should be migrated" + }, + { + "x": 747, + "y": 712, + "title": "New Note", + "body": "webhook URL in config should be migrated" + }, + { + "x": 681, + "y": 830, + "title": "New Note", + "body": "webhook URL on action should be migrated" + }, + { + "x": 682, + "y": 934, + "title": "New Note", + "body": "field value should be migrated" + } + ] + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_6.json b/media/test_flows/legacy/migrations/migrate_to_11_6.json new file mode 100644 index 00000000000..97898acd159 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_6.json @@ -0,0 +1,252 @@ +{ + "version": "11.5", + "site": "https://textit.in", + "flows": [ + { + "entry": "c4462613-5936-42cc-a286-82e5f1816793", + "action_sets": [ + { + "uuid": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", + "x": 76, + "y": 99, + "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", + "actions": [ + { + "type": "add_group", + "uuid": "feb7a33e-bc8b-44d8-9112-bc4e910fe304", + "groups": [ + { + "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", + "name": "Cat Fanciers" + } + ] + }, + { + "type": "add_group", + "uuid": "ca82f0e0-43ca-426c-a77c-93cf297b8e7c", + "groups": [ + { + "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", + "name": "Catnado" + } + ] + }, + { + "type": "reply", + "uuid": "d57e9e9f-ada4-4a22-99ef-b8bf3dbcdcae", + "msg": { + "eng": "You are a cat fan! Purrrrr." + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "55f88a1e-73ad-4b6d-9a04-626046bbe5a8" + }, + { + "uuid": "ef389049-d2e3-4343-b91f-13ea2db5f943", + "x": 558, + "y": 94, + "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", + "actions": [ + { + "type": "del_group", + "uuid": "cea907a8-af81-49af-92e6-f246e52179fe", + "groups": [ + { + "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", + "name": "Catnado" + } + ] + }, + { + "type": "reply", + "uuid": "394a328f-f829-43f2-9975-fe2f27c8b786", + "msg": { + "eng": "You are not a cat fan. Hissssss." 
+ }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "9ba78afa-948e-44c5-992f-84030f2eaa6b" + }, + { + "uuid": "d21be990-5e48-4e4b-995f-c9df8f38e517", + "x": 319, + "y": 323, + "destination": "35416fea-787d-48c1-b839-76eca089ad2e", + "actions": [ + { + "type": "channel", + "uuid": "78c58574-9f91-4c27-855e-73eacc99c395", + "channel": "bd55bb31-8ed4-4f89-b903-7103aa3762be", + "name": "Telegram: TextItBot" + } + ], + "exit_uuid": "c86638a9-2688-47c9-83ec-7f10ef49de1e" + }, + { + "uuid": "35416fea-787d-48c1-b839-76eca089ad2e", + "x": 319, + "y": 468, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "30d35b8f-f439-482a-91b1-d3b1a4351071", + "msg": { + "eng": "All done." + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "send", + "uuid": "a7b6def8-d315-49bd-82e4-85887f39babe", + "msg": { + "eng": "Hey Cat Fans!" + }, + "contacts": [], + "groups": [ + { + "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", + "name": "Cat Blasts" + } + ], + "variables": [], + "media": {} + }, + { + "type": "trigger-flow", + "uuid": "540965e5-bdfe-4416-b4dd-449220b1c588", + "flow": { + "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", + "name": "Cataclysmic" + }, + "contacts": [], + "groups": [ + { + "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", + "name": "Cat Alerts" + } + ], + "variables": [] + } + ], + "exit_uuid": "f2ef5066-434d-42bc-a5cb-29c59e51432f" + } + ], + "rule_sets": [ + { + "uuid": "c4462613-5936-42cc-a286-82e5f1816793", + "x": 294, + "y": 0, + "label": "Response 1", + "rules": [ + { + "uuid": "17d69564-60c9-4a56-be8b-34e98a2ce14a", + "category": { + "eng": "Cat Facts" + }, + "destination": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", + "destination_type": "A", + "test": { + "type": "in_group", + "test": { + "name": "Cat Facts", + "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9" + } + }, + "label": null + }, + { + "uuid": "a9ec4d0a-2ddd-4a13-a1d2-c63ce9916a04", + "category": { + "eng": "Other" + }, + "destination": "ef389049-d2e3-4343-b91f-13ea2db5f943", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "group", + "response_type": "", + "operand": "@step.value", + "config": {} + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.5", + "metadata": { + "name": "Cataclysmic", + "saved_on": "2018-10-18T17:03:54.835916Z", + "revision": 49, + "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", + "expires": 10080, + "notes": [] + } + }, + { + "entry": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", + "action_sets": [ + { + "uuid": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "add_group", + "uuid": "11f61fc6-834e-4cbc-88ee-c834279345e6", + "groups": [ + { + "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", + "name": "Cat Alerts" + }, + { + "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9", + "name": "Cat Facts" + }, + { + "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", + "name": "Cat Blasts" + }, + { + "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", + "name": "Cat Fanciers" + }, + { + "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", + "name": "Catnado" + } + ] + } + ], + "exit_uuid": "029a7c9d-c935-4ed1-9573-543ded29d954" + } + ], + "rule_sets": [], + "base_language": "eng", + "flow_type": "M", + "version": "11.5", + "metadata": { + "name": "Catastrophe", + "saved_on": "2018-10-18T19:03:07.702388Z", + "revision": 1, + "uuid": "d6dd96b1-d500-4c7a-9f9c-eae3f2a2a7c5", + "expires": 10080 + } 
+ } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_7.json b/media/test_flows/legacy/migrations/migrate_to_11_7.json new file mode 100644 index 00000000000..108cbf2c376 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_7.json @@ -0,0 +1,246 @@ +{ + "version": "11.6", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "eb59aed8-2eeb-43cd-adfc-9c44721436a2", + "action_sets": [ + { + "uuid": "eb59aed8-2eeb-43cd-adfc-9c44721436a2", + "x": 102, + "y": 0, + "destination": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", + "actions": [ + { + "type": "api", + "uuid": "82d23a8c-af4b-4a33-8d56-03139b1168cc", + "webhook": "http://example.com/hook1", + "action": "GET", + "webhook_headers": [ + { + "name": "Header1", + "value": "Value1" + } + ] + } + ], + "exit_uuid": "787517ce-9a6d-479e-bc81-c3f4dcbb3d1d" + }, + { + "uuid": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", + "x": 149, + "y": 107, + "destination": "efe05d14-7a96-4ec5-870c-5183408821ae", + "actions": [ + { + "type": "reply", + "uuid": "544fd45b-f9a9-4543-b352-06b67dc0c32c", + "msg": { + "eng": "Action before 1" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "reply", + "uuid": "252b59b0-3664-4a36-8b9f-9317e78011da", + "msg": { + "eng": "Action before 2" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "api", + "uuid": "55c868c0-f6f7-49a8-856c-809bd082ae3b", + "webhook": "http://example.com/hook2", + "action": "POST", + "webhook_headers": [] + }, + { + "type": "reply", + "uuid": "f7ec546c-9adf-4d51-ab8e-8a1cbde8d910", + "msg": { + "eng": "Action after 1" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "reply", + "uuid": "a44ec0b8-085d-4e80-b361-7529e659e5e6", + "msg": { + "eng": "Action after 2" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "32c5dba9-17d1-4d5d-a992-19c1ec6cf825" + }, + { + "uuid": "efe05d14-7a96-4ec5-870c-5183408821ae", + "x": 199, + "y": 446, + "destination": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", + "actions": [ + { + "type": "api", + "uuid": "05377f3c-d9b0-428d-ae14-219d2f3d0f9a", + "webhook": "http://example.com/hook3", + "action": "GET", + "webhook_headers": [] + }, + { + "type": "api", + "uuid": "61fadf6d-d2ba-4bbb-b312-1db3e336a661", + "webhook": "http://example.com/hook4", + "action": "GET", + "webhook_headers": [] + } + ], + "exit_uuid": "c2236afe-c3cb-43a5-9fa0-ee6cbfb92f42" + }, + { + "uuid": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", + "x": 245, + "y": 608, + "destination": "64d8b8a5-aca0-4406-b417-5827262e67e2", + "actions": [ + { + "type": "reply", + "uuid": "be4dbed8-7334-4700-a94d-50275015c048", + "msg": { + "eng": "Actionset without webhook" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "40b890ab-8fab-459f-8d5e-48d2ea57f7ce" + }, + { + "uuid": "d6da8268-0c61-4154-8659-dd073878541c", + "x": 1036, + "y": 265, + "destination": null, + "actions": [ + { + "type": "api", + "uuid": "b8a8715b-0fb5-4dde-a1fe-4fef045bb16c", + "webhook": "http://example.com/hook5", + "action": "GET", + "webhook_headers": [] + } + ], + "exit_uuid": "15170baf-8b15-4104-990c-13635f0bafbb" + } + ], + "rule_sets": [ + { + "uuid": "64d8b8a5-aca0-4406-b417-5827262e67e2", + "x": 673, + "y": 54, + "label": "Response 1", + "rules": [ + { + "uuid": "4bc64a60-b848-4f07-bbe8-8b82e72b6dea", + "category": { + "eng": "1" + }, + "destination": 
"eb59aed8-2eeb-43cd-adfc-9c44721436a2", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "eng": "1" + } + }, + "label": null + }, + { + "uuid": "2faff885-6ac4-4cef-bd11-53802be22508", + "category": { + "eng": "2" + }, + "destination": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "eng": "2" + } + }, + "label": null + }, + { + "uuid": "05efb767-1319-4f93-ba3f-8d3860a915af", + "category": { + "eng": "3" + }, + "destination": "efe05d14-7a96-4ec5-870c-5183408821ae", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "eng": "3" + } + }, + "label": null + }, + { + "uuid": "2bfbb15e-fb54-41a5-ba43-c67c219e8c57", + "category": { + "eng": "4" + }, + "destination": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", + "destination_type": "A", + "test": { + "type": "contains_any", + "test": { + "eng": "4" + } + }, + "label": null + }, + { + "uuid": "d091ea29-07b9-48b8-bc52-1de00687af1b", + "category": { + "eng": "Other" + }, + "destination": "d6da8268-0c61-4154-8659-dd073878541c", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "", + "operand": "@step.value", + "config": {} + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.6", + "metadata": { + "name": "Webhook Action Migration", + "saved_on": "2018-11-05T19:21:37.062932Z", + "revision": 61, + "uuid": "c9b9d79a-93b4-41e5-8ca3-f0b09faa2457", + "expires": 10080, + "notes": [] + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_8.json b/media/test_flows/legacy/migrations/migrate_to_11_8.json new file mode 100644 index 00000000000..db0f0f372c4 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_8.json @@ -0,0 +1,341 @@ +{ + "version": 11.7, + "site": null, + "flows": [ + { + "entry": "fde99613-a3e9-4f97-9e88-81ebc0ea6211", + "action_sets": [ + { + "uuid": "788064a1-fe23-4f6e-8041-200412dff55e", + "x": 389, + "y": 991, + "destination": "d8be5901-e847-4b6f-a603-51eb571718a1", + "actions": [ + { + "type": "reply", + "uuid": "fdee102d-5259-4153-8e43-0b7df1d3a1ee", + "msg": { + "base": "Thanks @extra.name, we'll be in touch ASAP about order # @extra.order." + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "email", + "uuid": "66c4a60f-3d63-4eed-bd03-c801baa0d793", + "emails": [ + "rowanseymour@gmail.com" + ], + "subject": "Order Comment: @flow.lookup: @extra.order", + "msg": "Customer @extra.name has a problem with their order @extra.order for @extra.description. Please look into it ASAP and call them back with the status.\n \nCustomer Comment: \"@flow.comment\"\nCustomer Name: @extra.name\nCustomer Phone: @contact.tel " + } + ], + "exit_uuid": "b193a69a-d5d9-423a-9f1f-0ad51847a075" + }, + { + "uuid": "1bdc3242-ef13-4c1b-a3b1-11554bffff7a", + "x": 612, + "y": 574, + "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", + "actions": [ + { + "type": "reply", + "uuid": "fc90459d-243c-4207-a26b-258e2c42cff3", + "msg": { + "base": "Uh oh @extra.name! Our record indicate that your order for @extra.description was cancelled on @extra.cancel_date. If you think this is in error, please reply with a comment and our orders department will get right on it!" 
+ }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "7e943c3d-b560-436f-bd7e-5c52e9162254" + }, + { + "uuid": "601c7150-7a3e-40aa-8f79-92f936e17cf9", + "x": 389, + "y": 572, + "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", + "actions": [ + { + "type": "reply", + "uuid": "459ed2db-9921-4326-87a1-5157e0a9b38a", + "msg": { + "base": "Hi @extra.name. Hope you are patient because we haven't shipped your order for @extra.description yet. We expect to ship it by @extra.ship_date though. If you have any questions, just reply and our customer service department will be notified." + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "5747ab91-d20c-4fff-8246-9d29a6ef1511" + }, + { + "uuid": "f87e2df4-5cbb-4961-b3c9-41eed35f8dbe", + "x": 167, + "y": 572, + "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", + "actions": [ + { + "type": "reply", + "uuid": "661ac1e4-2f13-48b1-adcf-0ff151833a86", + "msg": { + "base": "Great news @extra.name! We shipped your order for @extra.description on @extra.ship_date and we expect it will be delivered on @extra.delivery_date. If you have any questions, just reply and our customer service department will be notified." + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "aee36df6-a421-43b9-be55-a4a298c35f86" + }, + { + "uuid": "81c3ff98-3552-4962-ab05-8f7948ebac24", + "x": 787, + "y": 99, + "destination": "659f67c6-cf6d-4d43-bd64-a50318fd5168", + "actions": [ + { + "type": "reply", + "uuid": "7645e8cd-34a1-44d0-8b11-7f4f06bd5ac7", + "msg": { + "base": "Sorry that doesn't look like a valid order number. Maybe try: CU001, CU002 or CU003?" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "b6e7b7f2-88e5-4457-ba7b-6edb9fb81d9f" + }, + { + "uuid": "fde99613-a3e9-4f97-9e88-81ebc0ea6211", + "x": 409, + "y": 0, + "destination": "659f67c6-cf6d-4d43-bd64-a50318fd5168", + "actions": [ + { + "type": "reply", + "uuid": "c007a761-85c7-48eb-9b38-8d056d1d44ee", + "msg": { + "base": "Thanks for contacting the ThriftShop order status system. Please send your order # and we'll help you in a jiffy!" 
+ }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "0a300e24-c7fa-473d-b06e-2826fa25b447" + } + ], + "rule_sets": [ + { + "uuid": "691e8175-f6a1-45b3-b377-c8bda223e52b", + "x": 389, + "y": 875, + "label": "Comment", + "rules": [ + { + "uuid": "567cac39-5ee4-4dac-b29a-97dfef2a2eb1", + "category": { + "base": "All Responses" + }, + "destination": "788064a1-fe23-4f6e-8041-200412dff55e", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "O", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "659f67c6-cf6d-4d43-bd64-a50318fd5168", + "x": 356, + "y": 198, + "label": "Lookup Response", + "rules": [ + { + "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", + "category": { + "base": "All Responses" + }, + "destination": "541382fd-e897-4f77-b468-1f2c7bacf30c", + "destination_type": "R", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "C", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "d8be5901-e847-4b6f-a603-51eb571718a1", + "x": 389, + "y": 1252, + "label": "Extra Comments", + "rules": [ + { + "uuid": "bba334ec-321e-4ead-8d1d-f34d7bc983ad", + "category": { + "base": "All Responses" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "O", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "726f6b34-d6be-46fa-8061-bf1f081b15ce", + "x": 356, + "y": 398, + "label": "Lookup", + "rules": [ + { + "uuid": "d26ac82f-90dc-4f95-b105-7d3ca4effc20", + "category": { + "base": "Shipped" + }, + "destination": "f87e2df4-5cbb-4961-b3c9-41eed35f8dbe", + "destination_type": "A", + "test": { + "type": "contains", + "test": { + "base": "Shipped" + } + }, + "label": null + }, + { + "uuid": "774e6911-cb63-4700-99bc-5e16966393b8", + "category": { + "base": "Pending" + }, + "destination": "601c7150-7a3e-40aa-8f79-92f936e17cf9", + "destination_type": "A", + "test": { + "type": "contains", + "test": { + "base": "Pending" + } + }, + "label": null + }, + { + "uuid": "fee4858c-2545-435b-ae65-d9e6b8f8d106", + "category": { + "base": "Cancelled" + }, + "destination": "1bdc3242-ef13-4c1b-a3b1-11554bffff7a", + "destination_type": "A", + "test": { + "type": "contains", + "test": { + "base": "Cancelled" + } + }, + "label": null + }, + { + "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", + "category": { + "base": "Other" + }, + "destination": "81c3ff98-3552-4962-ab05-8f7948ebac24", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "expression", + "response_type": "", + "operand": "@extra.status", + "config": {} + }, + { + "uuid": "541382fd-e897-4f77-b468-1f2c7bacf30c", + "x": 356, + "y": 298, + "label": "Lookup Webhook", + "rules": [ + { + "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", + "category": { + "base": "Success" + }, + "destination": "726f6b34-d6be-46fa-8061-bf1f081b15ce", + "destination_type": "R", + "test": { + "type": "webhook_status", + "status": "success" + }, + "label": null + }, + { + "uuid": "008f4050-7979-42d5-a2cb-d1b4f6bc144f", + "category": { + "base": "Failure" + }, + "destination": "726f6b34-d6be-46fa-8061-bf1f081b15ce", + "destination_type": "R", + "test": { + "type": "webhook_status", + "status": "failure" + }, + "label": null + } 
+ ], + "finished_key": null, + "ruleset_type": "webhook", + "response_type": "", + "operand": "@step.value", + "config": { + "webhook": "https://textit.in/demo/status/", + "webhook_action": null + } + } + ], + "base_language": "base", + "flow_type": "M", + "version": "11.7", + "metadata": { + "notes": [ + { + "body": "This flow demonstrates looking up an order using a webhook and giving the user different options based on the results. After looking up the order the user has the option to send additional comments which are forwarded to customer support representatives.\n\nUse order numbers CU001, CU002 or CU003 to see the different cases in action.", + "x": 59, + "y": 0, + "title": "Using Your Own Data" + } + ], + "saved_on": "2019-01-09T18:29:40.288510Z", + "uuid": "3825c65e-5aa8-4619-8de9-963f68483cb3", + "name": "Sample Flow - Order Status Checker", + "revision": 11, + "expires": 720 + } + } + ] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_11_9.json b/media/test_flows/legacy/migrations/migrate_to_11_9.json new file mode 100644 index 00000000000..b1c3f8b56a5 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_11_9.json @@ -0,0 +1,458 @@ +{ + "version": "11.8", + "site": "https://app.rapidpro.io", + "flows": [ + { + "entry": "edea0cb4-00b9-4a53-a923-f4aa38cf18c5", + "action_sets": [ + { + "uuid": "edea0cb4-00b9-4a53-a923-f4aa38cf18c5", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "0b6745ce-6d8b-40d4-bb4f-f18f407bdcdf", + "msg": { + "base": "hi valid" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "d79e8a16-62df-4b48-aff9-fae2633f2b77" + } + ], + "rule_sets": [], + "base_language": "base", + "flow_type": "M", + "version": "11.8", + "metadata": { + "name": "Valid1", + "saved_on": "2018-12-17T12:08:54.146452Z", + "revision": 2, + "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", + "expires": 10080, + "ivr_retry_failed_events": null + } + }, + { + "entry": "d3e2b506-50cd-4c1e-9573-295bd2087258", + "action_sets": [ + { + "uuid": "d3e2b506-50cd-4c1e-9573-295bd2087258", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "d17b512e-87ed-4717-9461-bc2ffde23b77", + "msg": { + "base": "Hi flow invalid 1" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "c231d9a6-3b53-4670-ac41-247736126ffd" + } + ], + "rule_sets": [], + "base_language": "base", + "flow_type": "M", + "version": "11.8", + "metadata": { + "name": "Invalid1", + "saved_on": "2018-12-17T12:09:52.155509Z", + "revision": 2, + "uuid": "ad40071e-a665-4df3-af14-0bc0fe589244", + "expires": 10080, + "ivr_retry_failed_events": null + } + }, + { + "entry": "932b19a7-245c-4a2b-9249-66d4eb7cfdf7", + "action_sets": [ + { + "uuid": "932b19a7-245c-4a2b-9249-66d4eb7cfdf7", + "x": 100, + "y": 0, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "2f6cdd62-9b29-4597-8af0-3dd410ae46f0", + "msg": { + "base": "Hi flow invalid two" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "0035ccc2-6359-4954-bbc9-bddb90076c25" + } + ], + "rule_sets": [], + "base_language": "base", + "flow_type": "M", + "version": "11.8", + "metadata": { + "name": "Invalid2", + "saved_on": "2018-12-17T12:10:13.269437Z", + "revision": 3, + "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", + "expires": 10080, + "ivr_retry_failed_events": null + } + }, + { + "entry": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", + 
"action_sets": [ + { + "uuid": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", + "x": 375, + "y": 1, + "destination": "a7de0caa-5ab0-4edc-8fc8-33eb31f79cba", + "actions": [ + { + "type": "reply", + "uuid": "64ce02e3-8ea8-414a-a7cf-7f5d3938aa03", + "msg": { + "base": "Hi" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "33d4947d-bdda-4226-bc27-55a6b6e56b36" + }, + { + "uuid": "c9e48e85-b91c-4e2b-bb17-fb670f1559c0", + "x": 420, + "y": 622, + "destination": "861c6312-b2a8-4586-8688-6621d7065497", + "actions": [ + { + "type": "reply", + "uuid": "24b49e66-2520-4f9c-a6f7-f7a43793db53", + "msg": { + "base": "tnx" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "trigger-flow", + "uuid": "bf31a0f8-73d8-4c81-8f90-ea0d4008a212", + "flow": { + "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", + "name": "Invalid2" + }, + "contacts": [], + "groups": [ + { + "uuid": "ad17d536-0085-4e6b-abc6-222b22d57caa", + "name": "Empty" + } + ], + "variables": [] + } + ], + "exit_uuid": "c8b6b99d-1af5-4bd1-b2d8-b3e87de702e8" + }, + { + "uuid": "6adc7de8-6a84-490a-b3d3-3d1ec607d465", + "x": 580, + "y": 316, + "destination": null, + "actions": [ + { + "type": "reply", + "uuid": "09f0ddee-c27e-4397-bb3c-4a6cf35da77a", + "msg": { + "base": "tyvm" + }, + "media": {}, + "quick_replies": [], + "send_all": false + }, + { + "type": "flow", + "uuid": "95e9750f-9bf4-4ae9-aa07-5c4cde604956", + "flow": { + "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", + "name": "Invalid2" + } + } + ], + "exit_uuid": "1f5d4b7e-7ceb-47cf-91e7-94790f63c9db" + }, + { + "uuid": "ed891a32-6e6d-49b1-88d0-399d2002bce0", + "x": 323, + "y": 993, + "destination": null, + "actions": [ + { + "type": "flow", + "uuid": "70acb970-8b3a-47d0-9fb6-56c5974a582b", + "flow": { + "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", + "name": "Valid1" + } + } + ], + "exit_uuid": "b5bfc0a1-701e-4e19-ad3e-bf9d7470b241" + }, + { + "uuid": "a750fe69-167b-4ae3-af72-7aae4c2d8b1a", + "x": 598, + "y": 993, + "destination": null, + "actions": [ + { + "type": "flow", + "uuid": "9b0f11bd-fbda-4efe-a41a-8ef101412d95", + "flow": { + "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", + "name": "Invalid2" + } + } + ], + "exit_uuid": "1231547d-8f57-4a12-82d1-b1bf3e664010" + }, + { + "uuid": "0bfb7527-c6e9-4452-b780-6755d2041144", + "x": 576, + "y": 169, + "destination": null, + "actions": [ + { + "type": "flow", + "uuid": "e8e85830-1aef-4947-af91-1a2653f3627d", + "flow": { + "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", + "name": "Valid1" + } + } + ], + "exit_uuid": "8d2c4101-330a-49f7-a4e5-972513c1a995" + } + ], + "rule_sets": [ + { + "uuid": "a7de0caa-5ab0-4edc-8fc8-33eb31f79cba", + "x": 61, + "y": 190, + "label": "Response 1", + "rules": [ + { + "uuid": "a16424a4-95df-4839-813a-bf6bee37f735", + "category": { + "base": "1" + }, + "destination": "9baa6aaf-61bf-4686-8059-1c373a43e5a6", + "destination_type": "R", + "test": { + "type": "eq", + "test": "1" + }, + "label": null + }, + { + "uuid": "f6b45161-f1fe-475f-a4db-7eb300f26415", + "category": { + "base": "2" + }, + "destination": "c9e48e85-b91c-4e2b-bb17-fb670f1559c0", + "destination_type": "A", + "test": { + "type": "eq", + "test": "2" + }, + "label": null + }, + { + "uuid": "59bfd40b-8b94-4555-ac2e-e6883d280df2", + "category": { + "base": "3" + }, + "destination": "6adc7de8-6a84-490a-b3d3-3d1ec607d465", + "destination_type": "A", + "test": { + "type": "eq", + "test": "3" + }, + "label": null + }, + { + "uuid": "7b25509c-94c4-45c1-86cf-2995916ac825", + "category": { + 
"base": "Other" + }, + "destination": "0bfb7527-c6e9-4452-b780-6755d2041144", + "destination_type": "A", + "test": { + "type": "true" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "wait_message", + "response_type": "", + "operand": "@step.value", + "config": {} + }, + { + "uuid": "9baa6aaf-61bf-4686-8059-1c373a43e5a6", + "x": 51, + "y": 659, + "label": "Response 2", + "rules": [ + { + "uuid": "049a4d45-d50d-468a-ae61-9e55c5dda0ea", + "category": { + "base": "Completed" + }, + "destination": "54c31965-d727-4b0a-a37e-6231551343dc", + "destination_type": "R", + "test": { + "type": "subflow", + "exit_type": "completed" + }, + "label": null + }, + { + "uuid": "101dea88-83bf-4219-973b-d11de45589ae", + "category": { + "base": "Expired" + }, + "destination": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", + "destination_type": "A", + "test": { + "type": "subflow", + "exit_type": "expired" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "subflow", + "response_type": "", + "operand": "@step.value", + "config": { + "flow": { + "name": "Invalid1", + "uuid": "ad40071e-a665-4df3-af14-0bc0fe589244" + } + } + }, + { + "uuid": "54c31965-d727-4b0a-a37e-6231551343dc", + "x": 36, + "y": 875, + "label": "Response 3", + "rules": [ + { + "uuid": "6a9a30cc-0400-4148-b760-ff342d7ef496", + "category": { + "base": "Completed" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "subflow", + "exit_type": "completed" + }, + "label": null + }, + { + "uuid": "e9e0ad89-6d63-4744-ba35-8042af052a95", + "category": { + "base": "Expired" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "subflow", + "exit_type": "expired" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "subflow", + "response_type": "", + "operand": "@step.value", + "config": { + "flow": { + "name": "Valid1", + "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3" + } + } + }, + { + "uuid": "861c6312-b2a8-4586-8688-6621d7065497", + "x": 409, + "y": 863, + "label": "Response 4", + "rules": [ + { + "uuid": "451cb651-7a59-4d2b-bfe5-753643ad7db2", + "category": { + "base": "1" + }, + "destination": "ed891a32-6e6d-49b1-88d0-399d2002bce0", + "destination_type": "A", + "test": { + "type": "between", + "min": "0", + "max": "0.5" + }, + "label": null + }, + { + "uuid": "39c05550-91a3-4497-9595-2478b5ab6ae4", + "category": { + "base": "2" + }, + "destination": "a750fe69-167b-4ae3-af72-7aae4c2d8b1a", + "destination_type": "A", + "test": { + "type": "between", + "min": "0.5", + "max": "1" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "random", + "response_type": "", + "operand": "@(RAND())", + "config": {} + } + ], + "base_language": "base", + "flow_type": "M", + "version": "11.8", + "metadata": { + "name": "Master", + "saved_on": "2018-12-17T13:54:21.769976Z", + "revision": 56, + "uuid": "8d3f72ef-60b9-4902-b792-d664df502f3f", + "expires": 10080 + } + } + ], + "campaigns": [], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/migrate_to_9.json b/media/test_flows/legacy/migrations/migrate_to_9.json new file mode 100644 index 00000000000..908081987d9 --- /dev/null +++ b/media/test_flows/legacy/migrations/migrate_to_9.json @@ -0,0 +1,148 @@ +{ + "campaigns": [ + { + "events": [ + { + "event_type": "M", + "relative_to": { + "id": 1134, + "key": "next_appointment", + "label": "Next Show" + }, + "flow": { + "name": "Single Message", + "id": 2814 + }, + "offset": -1, + "delivery_hour": -1, + 
"message": "Hi there, your next show is @contact.next_show. Don't miss it!", + "id": 9959, + "unit": "H" + } + ], + "group": { + "name": "Pending Appointments", + "id": 2308 + }, + "id": 405, + "name": "Appointment Schedule" + } + ], + "version": 9, + "site": "https://app.rapidpro.io", + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "a04f3046-e053-444f-b018-eff019766ad9", + "uuid": "e4a03298-dd43-4afb-b185-2782fc36a006", + "actions": [ + { + "msg": { + "base": "Hi there!" + }, + "type": "reply" + }, + { + "uuid": "c756af8f-4480-4a91-875d-c0600597c0ae", + "contacts": [ + { + "id": contact_id, + "name": "Trey Anastasio" + } + ], + "groups": [], + "variables": [], + "msg": { + "base": "You're phantastic" + }, + "action": "GET", + "type": "send" + }, + { + "labels": [ + { + "name": "this label", + "id": label_id + } + ], + "type": "add_label" + }, + { + "field": "concat_test", + "type": "save", + "value": "@(CONCAT(extra.flow.divided, extra.flow.sky))", + "label": "Concat Test" + }, + { + "field": "normal_test", + "type": "save", + "value": "@extra.contact.name", + "label": "Normal Test" + } + ] + }, + { + "y": 142, + "x": 166, + "destination": null, + "uuid": "a04f3046-e053-444f-b018-eff019766ad9", + "actions": [ + { + "type": "add_group", + "groups": [ + { + "name": "Survey Audience", + "id": group_id + }, + "@(\"Phans\")", + "Survey Audience" + ] + }, + { + "type": "del_group", + "groups": [ + { + "name": "Unsatisfied Customers", + "id": group_id + } + ] + }, + { + "name": "Test flow", + "contacts": [], + "variables": [ + { + "id": "@contact.tel_e164" + } + ], + "groups": [], + "type": "trigger-flow", + "id": start_flow_id + }, + { + "type": "flow", + "name": "Parent Flow", + "id": start_flow_id + } + ] + } + ], + "version": 9, + "flow_type": "F", + "entry": "e4a03298-dd43-4afb-b185-2782fc36a006", + "rule_sets": [], + "metadata": { + "expires": 10080, + "revision": 11, + "id": previous_flow_id, + "name": "Migrate to 9", + "saved_on": "2016-06-22T15:05:12.074490Z" + } + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/multi_language_flow.json b/media/test_flows/legacy/migrations/multi_language_flow.json new file mode 100644 index 00000000000..d4fadda4704 --- /dev/null +++ b/media/test_flows/legacy/migrations/multi_language_flow.json @@ -0,0 +1,176 @@ +{ + "version": 4, + "flows": [ + { + "definition": { + "base_language": "eng", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", + "uuid": "d563e7ca-aa0f-4615-ba8c-eab5e13ff4bf", + "actions": [ + { + "msg": { + "spa": "\u00a1Hola amigo! \u00bfCu\u00e1l es tu color favorito?", + "eng": "Hello friend! What is your favorite color?" + }, + "type": "reply" + } + ] + }, + { + "y": 266, + "x": 351, + "destination": null, + "uuid": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", + "actions": [ + { + "msg": { + "spa": "\u00a1Gracias! Me gusta @flow.color.", + "eng": "Thank you! I like @flow.color." + }, + "type": "reply" + }, + { + "msg": { + "eng": "This message was not translated." + }, + "type": "reply" + } + ] + }, + { + "y": 179, + "x": 683, + "destination": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", + "uuid": "6ea52610-838c-4f64-8e24-99754135da67", + "actions": [ + { + "msg": { + "spa": "Por favor, una vez m\u00e1s", + "eng": "Please try again." 
+ }, + "type": "reply" + } + ] + } + ], + "last_saved": "2015-02-19T05:55:32.232993Z", + "entry": "d563e7ca-aa0f-4615-ba8c-eab5e13ff4bf", + "rule_sets": [ + { + "uuid": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", + "webhook_action": null, + "rules": [ + { + "test": { + "test": { + "spa": "rojo", + "eng": "Red" + }, + "base": "Red", + "type": "contains_any" + }, + "category": { + "spa": "Rojo", + "base": "Red", + "eng": "Red" + }, + "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", + "config": { + "type": "contains_any", + "verbose_name": "has any of these words", + "name": "Contains any", + "localized": true, + "operands": 1 + }, + "uuid": "de555b2c-2616-49ff-8564-409a01b0bd79" + }, + { + "test": { + "test": { + "spa": "verde", + "eng": "Green" + }, + "base": "Green", + "type": "contains_any" + }, + "category": { + "spa": "Verde", + "base": "Green", + "eng": "Green" + }, + "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", + "config": { + "type": "contains_any", + "verbose_name": "has any of these words", + "name": "Contains any", + "localized": true, + "operands": 1 + }, + "uuid": "e09c7ad3-46c8-4024-9fcf-8a0d26d97d6a" + }, + { + "test": { + "test": { + "spa": "azul", + "eng": "Blue" + }, + "base": "Blue", + "type": "contains_any" + }, + "category": { + "spa": "Azul", + "base": "Blue", + "eng": "Blue" + }, + "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", + "config": { + "type": "contains_any", + "verbose_name": "has any of these words", + "name": "Contains any", + "localized": true, + "operands": 1 + }, + "uuid": "aafd9e60-4d74-40cb-a923-3501560cb5c1" + }, + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "spa": "Otro", + "base": "Other", + "eng": "Other" + }, + "destination": "6ea52610-838c-4f64-8e24-99754135da67", + "config": { + "type": "true", + "verbose_name": "contains anything", + "name": "Other", + "operands": 0 + }, + "uuid": "2263684a-0354-448e-8213-c57644e91798" + } + ], + "webhook": null, + "label": "Color", + "operand": "@step.value", + "finished_key": null, + "response_type": "C", + "y": 132, + "x": 242 + } + ], + "metadata": {} + }, + "id": 1400, + "flow_type": "F", + "name": "Multi Language Flow" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/old_expressions.json b/media/test_flows/legacy/migrations/old_expressions.json new file mode 100644 index 00000000000..17ef733d82e --- /dev/null +++ b/media/test_flows/legacy/migrations/old_expressions.json @@ -0,0 +1,118 @@ +{ + "version": 7, + "flows": [ + { + "definition": { + "base_language": "eng", + "action_sets": [ + { + "y": 0, + "x": 100, + "destination": "a32d0ebb-57aa-452e-bd8d-ae5febee4440", + "uuid": "a26285b1-134b-421b-9853-af0f26d13777", + "actions": [ + { + "msg": { + "eng": "Hi @contact.name|upper_case. Today is =(date.now)" + }, + "type": "reply" + } + ] + }, + { + "y": 350, + "x": 164, + "destination": null, + "uuid": "054d9e01-8e68-4f6d-9cf3-44407256670e", + "actions": [ + { + "type": "add_group", + "groups": [ + "=flow.response_1.category" + ] + }, + { + "msg": { + "eng": "Was @contact.name|lower_case|title_case." 
+ }, + "variables": [ + { + "id": "=flow.response_1.category" + } + ], + "type": "send", + "groups": [], + "contacts": [] + } + ] + } + ], + "last_saved": "2015-09-23T07:54:10.928652Z", + "entry": "a26285b1-134b-421b-9853-af0f26d13777", + "rule_sets": [ + { + "uuid": "a32d0ebb-57aa-452e-bd8d-ae5febee4440", + "webhook_action": "GET", + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "eng": "All Responses" + }, + "destination": "028c71a3-0696-4d98-8ff3-0dc700811124", + "uuid": "bf879f78-aff8-4c64-9326-e92f677af5cf", + "destination_type": "R" + } + ], + "webhook": "http://example.com/query.php?contact=@contact.name|upper_case", + "ruleset_type": "webhook", + "label": "Response 1", + "operand": "=(step.value)", + "finished_key": null, + "response_type": "", + "y": 134, + "x": 237, + "config": {} + }, + { + "uuid": "028c71a3-0696-4d98-8ff3-0dc700811124", + "webhook_action": null, + "rules": [ + { + "test": { + "test": "true", + "type": "true" + }, + "category": { + "eng": "All Responses" + }, + "destination": "054d9e01-8e68-4f6d-9cf3-44407256670e", + "uuid": "35ba932c-d45a-4cf5-bd0b-41fd9b80cc27", + "destination_type": "A" + } + ], + "webhook": null, + "ruleset_type": "expression", + "label": "Response 2", + "operand": "@step.value|time_delta:\"3\"", + "finished_key": null, + "response_type": "", + "y": 240, + "x": 203, + "config": {} + } + ], + "type": "F", + "metadata": {} + }, + "expires": 10080, + "id": 31427, + "flow_type": "F", + "name": "Old Expressions" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/single_message_bad_localization.json b/media/test_flows/legacy/migrations/single_message_bad_localization.json new file mode 100644 index 00000000000..8c13b95e015 --- /dev/null +++ b/media/test_flows/legacy/migrations/single_message_bad_localization.json @@ -0,0 +1,25 @@ +{ + "version": 10, + "flows": [ + { + "base_language": "eng", + "rule_sets": [], + "action_sets": [ + { + "y": 0, + "x": 100, + "uuid": "37fe93f8-edf5-40f3-b029-3b391fa528d0", + "actions": [ + { + "msg": "Campaign Message 12", + "type": "reply", + "uuid": "9bdb1aab-e42e-4585-8395-6504c4a683ed" + } + ] + } + ], + "entry": "37fe93f8-edf5-40f3-b029-3b391fa528d0" + } + ], + "triggers": [] +} \ No newline at end of file diff --git a/media/test_flows/legacy/migrations/type_flow.json b/media/test_flows/legacy/migrations/type_flow.json new file mode 100644 index 00000000000..ed3976e9958 --- /dev/null +++ b/media/test_flows/legacy/migrations/type_flow.json @@ -0,0 +1,394 @@ +{ + "campaigns": [], + "version": "10.1", + "site": "https://app.rapidpro.io", + "flows": [ + { + "base_language": "base", + "action_sets": [ + { + "y": 0, + "x": 92, + "destination": "9c941ba5-e4df-47e0-9a4f-594986ae1b1a", + "uuid": "bc3da5f2-6fe5-41f1-ac0e-ec2701189ef2", + "actions": [ + { + "msg": { + "base": "Hey @contact.nickname, you joined on @contact.joined_on in @contact.district." + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "4dc98ff5-8d86-45f5-8336-8949029e893e" + }, + { + "msg": { + "base": "It's @date. The time is @date.now on @date.today." 
+ }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "058e5d4a-3447-49d9-a033-ebe3010b5875" + }, + { + "msg": { + "base": "Send text" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "9568e1c8-04f2-45ef-a477-4521d19bfaf6" + } + ] + }, + { + "y": 257, + "x": 78, + "destination": "a4904b78-08b8-42fd-9479-27bcb1764bc4", + "uuid": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", + "actions": [ + { + "msg": { + "base": "You said @flow.text at @flow.text.time. Send date" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "6f4fc213-3037-49e5-ac45-b956c48fd546" + } + ] + }, + { + "y": 540, + "x": 95, + "destination": "9994619b-e68d-4c94-90d6-af19fb944f7d", + "uuid": "9bbdc63c-4385-44e1-b573-a127f50d3d34", + "actions": [ + { + "msg": { + "base": "You said @flow.date which was in category @flow.date.category Send number" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "7177ef30-33ca-4b25-8af7-3213e0483b56" + } + ] + }, + { + "y": 825, + "x": 96, + "destination": "01cc820b-c516-4e68-8903-aa69866b11b6", + "uuid": "a4a37023-de22-4ac4-b431-da2a333c93cd", + "actions": [ + { + "msg": { + "base": "You said @flow.number. Send state" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "34d622bc-e2ad-44aa-b047-cfb38e2dc2cc" + } + ] + }, + { + "y": 1084, + "x": 94, + "destination": "9769918c-8ca4-4ec5-8b5b-bf94cc6746a9", + "uuid": "7e8dfcd5-6510-4060-9608-2c8faa3a8e0a", + "actions": [ + { + "msg": { + "base": "You said @flow.state which was in category @flow.state.category. Send district" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "a4428571-9b86-49b8-97e1-6ffee3cddbaa" + } + ] + }, + { + "y": 1460, + "x": 73, + "destination": "ea2244de-7b23-4fbb-8f99-38cde3100de8", + "uuid": "605e2fe7-321a-4cce-b97b-877d75bd3b12", + "actions": [ + { + "msg": { + "base": "You said @flow.district. 
Send ward" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "5f8eb5aa-249b-4718-a502-8406dd0ae418" + } + ] + }, + { + "y": 1214, + "x": 284, + "destination": "498b1953-02f1-47dd-b9cb-1b51913e348f", + "uuid": "9769918c-8ca4-4ec5-8b5b-bf94cc6746a9", + "actions": [ + { + "msg": { + "base": "You said @flow.ward.", + "fre": "Tu as dit @flow.ward" + }, + "media": {}, + "send_all": false, + "type": "reply", + "uuid": "b95b88c8-a85c-4bac-931d-310d678c286a" + }, + { + "lang": "fre", + "type": "lang", + "name": "French", + "uuid": "56a4bca5-b9e5-4d04-883c-ca65d7c4d538" + } + ] + } + ], + "version": "10.1", + "flow_type": "F", + "entry": "bc3da5f2-6fe5-41f1-ac0e-ec2701189ef2", + "rule_sets": [ + { + "uuid": "9c941ba5-e4df-47e0-9a4f-594986ae1b1a", + "rules": [ + { + "category": { + "base": "All Responses" + }, + "uuid": "a4682f52-7869-4e64-bf9f-8d2c0a341d19", + "destination": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", + "label": null, + "destination_type": "A", + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "Text", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 146, + "x": 265, + "config": {} + }, + { + "uuid": "a4904b78-08b8-42fd-9479-27bcb1764bc4", + "rules": [ + { + "category": { + "base": "is a date" + }, + "uuid": "e410616b-b5cd-4fd1-af42-9c6b6c9fe282", + "destination": "9bbdc63c-4385-44e1-b573-a127f50d3d34", + "label": null, + "destination_type": "A", + "test": { + "type": "date" + } + }, + { + "category": { + "base": "Other" + }, + "uuid": "a720d0b1-0686-47be-a306-1543e470c6de", + "destination": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", + "label": null, + "destination_type": "A", + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "Date", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 391, + "x": 273, + "config": {} + }, + { + "uuid": "9994619b-e68d-4c94-90d6-af19fb944f7d", + "rules": [ + { + "category": { + "base": "numeric" + }, + "uuid": "c4881d22-57aa-4964-abbc-aaf26b875614", + "destination": "a4a37023-de22-4ac4-b431-da2a333c93cd", + "label": null, + "destination_type": "A", + "test": { + "type": "number" + } + }, + { + "category": { + "base": "Other" + }, + "uuid": "6cd3fb0c-070d-4060-bafc-badaebe5134e", + "destination": null, + "label": null, + "destination_type": null, + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "Number", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 679, + "x": 267, + "config": {} + }, + { + "uuid": "01cc820b-c516-4e68-8903-aa69866b11b6", + "rules": [ + { + "category": { + "base": "state" + }, + "uuid": "4ef398b1-d3f1-4023-b608-8803cc05dd20", + "destination": "7e8dfcd5-6510-4060-9608-2c8faa3a8e0a", + "label": null, + "destination_type": "A", + "test": { + "type": "state" + } + }, + { + "category": { + "base": "Other" + }, + "uuid": "38a4583c-cf73-454c-80e5-09910cf92f4b", + "destination": null, + "label": null, + "destination_type": null, + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "State", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 956, + "x": 271, + "config": {} + }, + { + "uuid": "498b1953-02f1-47dd-b9cb-1b51913e348f", + "rules": [ + { + "category": { + "base": "district", + "fre": "le district" + }, + "uuid": "47147597-00c6-44bc-95d2-bebec9f1a45b", + "destination": "605e2fe7-321a-4cce-b97b-877d75bd3b12", + "label": null, + "destination_type": "A", + 
"test": { + "test": "@flow.state", + "type": "district" + } + }, + { + "category": { + "base": "Other" + }, + "uuid": "1145c620-2512-4228-b561-80024bbd91ee", + "destination": null, + "label": null, + "destination_type": null, + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "District", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 1355, + "x": 266, + "config": {} + }, + { + "uuid": "ea2244de-7b23-4fbb-8f99-38cde3100de8", + "rules": [ + { + "category": { + "base": "ward" + }, + "uuid": "b5159826-a55a-4803-a656-64d47803e8bf", + "destination": null, + "label": null, + "destination_type": null, + "test": { + "state": "@flow.state.", + "type": "ward", + "district": "@flow.district" + } + }, + { + "category": { + "base": "Other" + }, + "uuid": "c1aa2a53-4d85-4fdd-953e-7e24b06cc7ea", + "destination": null, + "label": null, + "destination_type": null, + "test": { + "type": "true" + } + } + ], + "ruleset_type": "wait_message", + "label": "Ward", + "operand": "@step.value", + "finished_key": null, + "response_type": "", + "y": 1584, + "x": 268, + "config": {} + } + ], + "metadata": { + "expires": 10080, + "revision": 19, + "uuid": "d7468d97-b8d7-482e-a09c-d0bfe839c555", + "name": "Type Flow", + "saved_on": "2017-10-30T19:38:39.814935Z" + } + } + ], + "triggers": [ + { + "trigger_type": "K", + "flow": { + "name": "Type Flow", + "uuid": "d7468d97-b8d7-482e-a09c-d0bfe839c555" + }, + "groups": [], + "keyword": "types", + "channel": null + } + ] +} \ No newline at end of file diff --git a/media/test_flows/loop_detection.json b/media/test_flows/loop_detection.json deleted file mode 100644 index 91542485279..00000000000 --- a/media/test_flows/loop_detection.json +++ /dev/null @@ -1,275 +0,0 @@ -{ - "campaigns": [], - "version": 7, - "site": "http://rapidpro.io", - "flows": [ - { - "version": 7, - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 132, - "destination": "9e348f0c-f7fa-4c06-a78b-9ffa839e5779", - "uuid": "13977cf2-68ee-49b9-8d88-2b9dbce12c5b", - "actions": [ - { - "msg": { - "eng": "Message One" - }, - "type": "reply" - } - ] - }, - { - "y": 167, - "x": 133, - "destination": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "uuid": "fb3e6f98-2cf3-40e8-ba1a-ea87dfcbd458", - "actions": [ - { - "msg": { - "eng": "You are in Group A" - }, - "type": "reply" - } - ] - }, - { - "y": 400, - "x": 434, - "destination": null, - "uuid": "3a0f77d1-f6bf-47f1-b194-de2051ba0738", - "actions": [ - { - "msg": { - "eng": "You picked @flow.message_split_a.category" - }, - "type": "reply" - } - ] - } - ], - "last_saved": "2015-05-04T19:48:06.359817Z", - "entry": "13977cf2-68ee-49b9-8d88-2b9dbce12c5b", - "rule_sets": [ - { - "uuid": "9e348f0c-f7fa-4c06-a78b-9ffa839e5779", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": { - "eng": "Group A" - }, - "base": "Group A", - "type": "contains_any" - }, - "category": { - "eng": "Group A", - "base": "Group A" - }, - "destination": "fb3e6f98-2cf3-40e8-ba1a-ea87dfcbd458", - "uuid": "605e4e98-5d85-45e7-a885-9c198977b63c" - }, - { - "test": { - "test": { - "eng": "Group B" - }, - "base": "Group B", - "type": "contains_any" - }, - "category": { - "eng": "Group B", - "base": "Group B" - }, - "destination": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "uuid": "81ba32a2-b3ea-4d46-aa7e-2ef32d7ced1e" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other", - "base": "Other" - }, - "destination": "782e9e71-c116-4195-add3-1867132f95b6", - "uuid": 
"9e712fa4-d988-483b-9820-e6bcc6d0cfba" - } - ], - "webhook": null, - "label": "Group Split A", - "operand": "@contact.groups", - "finished_key": null, - "ruleset_type": "expression", - "y": 70, - "x": 401 - }, - { - "uuid": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "eng": "Red" - }, - "base": "Red", - "type": "contains_any" - }, - "category": { - "eng": "Red", - "base": "Red" - }, - "destination": "3a0f77d1-f6bf-47f1-b194-de2051ba0738", - "uuid": "77f97500-0f06-443d-aec1-8d045962c7b8" - }, - { - "test": { - "test": { - "eng": "Green" - }, - "base": "Green", - "type": "contains_any" - }, - "category": { - "eng": "Green", - "base": "Green" - }, - "destination": "3a0f77d1-f6bf-47f1-b194-de2051ba0738", - "uuid": "15fa4511-c63e-4e45-be09-c63c87480189" - }, - { - "test": { - "test": { - "eng": "Blue" - }, - "base": "Blue", - "type": "contains_any" - }, - "category": { - "eng": "Blue", - "base": "Blue" - }, - "destination": "3a0f77d1-f6bf-47f1-b194-de2051ba0738", - "uuid": "8b8dc778-7d49-4572-af9f-97d5aee5dce8" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other", - "base": "All Responses" - }, - "destination": "3a0f77d1-f6bf-47f1-b194-de2051ba0738", - "uuid": "1d9900bc-9315-4ee2-892f-60013dd9541d" - } - ], - "webhook": null, - "label": "Message Split A", - "operand": "@step.value", - "finished_key": null, - "ruleset_type": "wait_message", - "y": 265, - "x": 356 - }, - { - "uuid": "782e9e71-c116-4195-add3-1867132f95b6", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": { - "eng": "Rowan" - }, - "base": "Rowan", - "type": "contains_any" - }, - "category": { - "eng": "Rowan", - "base": "Rowan" - }, - "destination": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "uuid": "f78edeea-4339-4f06-b95e-141975b97cb8" - }, - { - "test": { - "test": { - "eng": "Norbert" - }, - "base": "Norbert", - "type": "contains_any" - }, - "category": { - "eng": "Norbert", - "base": "Norbert" - }, - "destination": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "uuid": "e399c915-6226-4b00-bd9a-8347bd03a85a" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "Other", - "base": "Other" - }, - "destination": "1f1adefb-0791-4e3c-9e8f-10dc6d56d3a5", - "uuid": "7247d462-6ac5-4302-8ace-5a61c714377d" - } - ], - "webhook": null, - "label": "Name Split", - "operand": "@contact.name", - "finished_key": null, - "ruleset_type": "contact_field", - "y": 153, - "x": 735 - }, - { - "uuid": "771088fd-fc77-4966-8541-93c3c59c923d", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses", - "eng": "All Responses" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "865baac0-da29-4752-be1e-1488457f708c" - } - ], - "webhook": null, - "label": "Message Split B", - "operand": "@step.value", - "finished_key": null, - "ruleset_type": "wait_message", - "y": 555, - "x": 419 - } - ], - "flow_type": "F", - "metadata": { - "name": "Loop Detection", - "id": 1000 - } - } - ], - "triggers": [] -} diff --git a/media/test_flows/malformed_groups.json b/media/test_flows/malformed_groups.json deleted file mode 100644 index 44ca9c05b60..00000000000 --- a/media/test_flows/malformed_groups.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "version": 4, - "flows": [ - { - "definition": { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": 
null, - "uuid": "a6676605-332a-4309-a8b8-79b33e73adcd", - "actions": [ - { - "type": "add_group", - "uuid": "5f5a2aac-f4f4-4b47-af6e-186f6dafb9f0", - "group": {"name": "< 25", "id": 15572} - }, - { - "type": "del_group", - "uuid": "2a385c5b-e27c-43ac-bbc6-49653fede421", - "group": {"id": 15573} - } - ] - } - ], - "rule_sets": [], - "metadata": { - "uuid": "77ae372d-a937-4d9b-a703-cc1c75c4c6f1", - "notes": [], - "expires": 720, - "name": "Bad Mojo", - "revision": 1, - "saved_on": "2017-08-16T23:10:18.579169Z" - } - }, - "version": 4, - "flow_type": "F", - "name": "Bad Mojo", - "entry": "a6676605-332a-4309-a8b8-79b33e73adcd" - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/malformed_single_message.json b/media/test_flows/malformed_single_message.json deleted file mode 100644 index 9c753ed3a9e..00000000000 --- a/media/test_flows/malformed_single_message.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "campaigns": [], - "triggers": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "name": "Single Message Flow", - "id": -1, - "uuid": "f467561a-3b95-4a4a-94bc-97bc6b4268c0", - "definition": { - "entry": "2d702ba6-461e-442c-96bc-2b8a87c9ceca", - "action_sets": [ - { - "x": 0, - "y": 0, - "uuid": "2d702ba6-461e-442c-96bc-2b8a87c9ceca", - "destination": null, - "actions":[ - { - "msg": "Single message text", - "type": "reply" - } - ] - } - ], - "rulesets": [] - } - } - ] -} diff --git a/media/test_flows/migrate_to_11_0.json b/media/test_flows/migrate_to_11_0.json deleted file mode 100644 index 62a9bb2ce6d..00000000000 --- a/media/test_flows/migrate_to_11_0.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "version": "10.4", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "d96947d0-f975-47ee-be7d-3dfe68a52703", - "action_sets": [ - { - "uuid": "d96947d0-f975-47ee-be7d-3dfe68a52703", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "msg": { - "base": { - "base": "@date Something went wrong once. I shouldn't be a dict inside a dict." - } - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "1ee58c31-3504-49d3-914b-324d484fed1d" - } - ], - "exit_uuid": "f2566f59-5d36-4de7-8581-dcc5de7e8340" - } - ], - "rule_sets": [], - "base_language": "base", - "flow_type": "M", - "version": "10.4", - "metadata": { - "name": "Migrate to 11.0", - "saved_on": "2017-11-15T22:56:36.039558Z", - "revision": 5, - "uuid": "5a8deb77-23b8-46ee-a775-48ed32742e31", - "expires": 720 - } - } - ] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_10.json b/media/test_flows/migrate_to_11_10.json deleted file mode 100644 index 8f3ed9b8ea0..00000000000 --- a/media/test_flows/migrate_to_11_10.json +++ /dev/null @@ -1,239 +0,0 @@ -{ - "version": "11.9", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "bd6ca3fc-0505-4ea6-a1c6-60d0296a7db0", - "action_sets": [ - { - "uuid": "bd6ca3fc-0505-4ea6-a1c6-60d0296a7db0", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "say", - "uuid": "0738e369-279d-4e2f-a14c-08714b0d6f74", - "msg": { - "eng": "Hi there this is an IVR flow.. how did you get here?" 
- }, - "recording": null - } - ], - "exit_uuid": "0e78ff3d-8307-4c0e-a3b0-af4019930835" - } - ], - "rule_sets": [], - "base_language": "eng", - "flow_type": "V", - "version": "11.9", - "metadata": { - "name": "Migrate to 11.10 IVR Child", - "saved_on": "2019-01-25T21:14:37.475679Z", - "revision": 2, - "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5", - "expires": 5, - "ivr_retry": 60, - "ivr_retry_failed_events": false - } - }, - { - "entry": "920ce708-31d3-4870-804f-190fb37b9b8c", - "action_sets": [ - { - "uuid": "920ce708-31d3-4870-804f-190fb37b9b8c", - "x": 59, - "y": 0, - "destination": "90363d00-a669-4d84-ab57-eb27bf9c3284", - "actions": [ - { - "type": "reply", - "uuid": "3071cb5d-4caf-4a15-87c7-daae4a436ee7", - "msg": { - "eng": "hi" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "f646245c-ac46-4565-9215-cef53c34da09" - }, - { - "uuid": "bbd1c25f-ab01-4539-8f3e-b0ca18f366f4", - "x": 48, - "y": 345, - "destination": null, - "actions": [ - { - "type": "flow", - "uuid": "edb70527-47fa-463e-8318-359254b1bc0e", - "flow": { - "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5", - "name": "Migrate to 11.10 IVR Child" - } - } - ], - "exit_uuid": "330f0f9a-154b-49de-9ff9-a7891d4a11af" - }, - { - "uuid": "62e29de4-d85e-459d-ad38-220d1048b714", - "x": 412, - "y": 348, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "41ed5ba3-41c7-4e6f-b394-d451204bcf0f", - "msg": { - "eng": "Expired" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "0040f402-a6ac-4de4-8775-a4938b9011b8" - } - ], - "rule_sets": [ - { - "uuid": "90363d00-a669-4d84-ab57-eb27bf9c3284", - "x": 218, - "y": 82, - "label": "Response 1", - "rules": [ - { - "uuid": "4c6ac0ad-e8a8-4b1e-b958-ef2f22728821", - "category": { - "eng": "Completed" - }, - "destination": "e5dae061-2c94-45ae-a3bb-4822989e636a", - "destination_type": "R", - "test": { - "type": "subflow", - "exit_type": "completed" - }, - "label": null - }, - { - "uuid": "288dfab6-5171-4cf0-92af-e73af44dbeee", - "category": { - "eng": "Expired" - }, - "destination": "e5dae061-2c94-45ae-a3bb-4822989e636a", - "destination_type": "R", - "test": { - "type": "subflow", - "exit_type": "expired" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "subflow", - "response_type": "", - "operand": "@step.value", - "config": { - "flow": { - "name": "Migrate to 11.10 SMS Child", - "uuid": "a492288a-7b26-4507-b8db-173d28b83ad0" - } - } - }, - { - "uuid": "e5dae061-2c94-45ae-a3bb-4822989e636a", - "x": 218, - "y": 228, - "label": "Response 2", - "rules": [ - { - "uuid": "b9f763d2-82d7-4334-8ed8-806b803d32c1", - "category": { - "eng": "Completed" - }, - "destination": "bbd1c25f-ab01-4539-8f3e-b0ca18f366f4", - "destination_type": "A", - "test": { - "type": "subflow", - "exit_type": "completed" - }, - "label": null - }, - { - "uuid": "54b51a30-8c52-49aa-afc1-24d827a17a8d", - "category": { - "eng": "Expired" - }, - "destination": "62e29de4-d85e-459d-ad38-220d1048b714", - "destination_type": "A", - "test": { - "type": "subflow", - "exit_type": "expired" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "subflow", - "response_type": "", - "operand": "@step.value", - "config": { - "flow": { - "name": "Migrate to 11.10 IVR Child", - "uuid": "5331c09c-2bd6-47a5-ac0d-973caf9d4cb5" - } - } - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.9", - "metadata": { - "name": "Migrate to 11.10 Parent", - "saved_on": "2019-01-28T19:51:28.310305Z", - "revision": 
52, - "uuid": "880cea73-fab6-4353-9db2-bf2e16067941", - "expires": 10080 - } - }, - { - "entry": "762fb8ad-1ec5-4246-a577-e08f0fe497e5", - "action_sets": [ - { - "uuid": "762fb8ad-1ec5-4246-a577-e08f0fe497e5", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "69a7f227-5f44-4ddc-80e1-b9dd855868eb", - "msg": { - "eng": "I'm just a regular honest messaging flow" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "8ec7a5ed-675c-4102-b211-ea10258ac5f9" - } - ], - "rule_sets": [], - "base_language": "eng", - "flow_type": "M", - "version": "11.9", - "metadata": { - "name": "Migrate to 11.10 SMS Child", - "saved_on": "2019-01-28T19:03:29.579743Z", - "revision": 2, - "uuid": "a492288a-7b26-4507-b8db-173d28b83ad0", - "expires": 10080, - "ivr_retry_failed_events": null - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_11.json b/media/test_flows/migrate_to_11_11.json deleted file mode 100644 index 9a41dc555db..00000000000 --- a/media/test_flows/migrate_to_11_11.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "version": "11.10", - "site": "https://textit.in", - "flows": [ - { - "entry": "22505d46-43c5-42ba-975e-725c01ea440f", - "action_sets": [ - { - "uuid": "22505d46-43c5-42ba-975e-725c01ea440f", - "x": 100, - "y": 0, - "destination": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", - "actions": [ - { - "type": "reply", - "uuid": "27dfd8ac-55c5-49c9-88e3-3fb84a9894ff", - "msg": { - "eng": "Hey" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "6e2b09ec-3cc0-4ee6-ae7b-b76bad3ab6d3" - }, - { - "uuid": "f3a1a671-5f5b-489e-9410-9a09fa5eaafb", - "x": 95, - "y": 101, - "destination": "78c20ee4-94bd-45e6-8510-8e602568fb6e", - "actions": [ - { - "type": "add_label", - "uuid": "bc82c11d-7654-44e4-966c-fb39e2851df0", - "labels": [ - { - "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", - "name": "Hello" - } - ] - } - ], - "exit_uuid": "84bf44a1-13fd-44cb-8014-d6feb06e010f" - }, - { - "uuid": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", - "x": 146, - "y": 358, - "destination": null, - "actions": [ - { - "type": "add_label", - "uuid": "910bf3b5-951f-47a8-93df-11a6eac8bf0f", - "labels": [ - { - "uuid": "0bfecd01-9612-48ab-8c49-72170de6ee49", - "name": "Hello" - } - ] - } - ], - "exit_uuid": "6d579c28-9f3f-4584-bd2e-74009612fdbb" - } - ], - "rule_sets": [ - { - "uuid": "78c20ee4-94bd-45e6-8510-8e602568fb6e", - "x": 85, - "y": 219, - "label": "Response 1", - "rules": [ - { - "uuid": "33438bbf-49bd-4468-9a74-bbd7e1f58f57", - "category": { - "eng": "All Responses" - }, - "destination": "7ca2b0ef-0b23-4c6e-bccb-c5f2d62d2663", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.10", - "metadata": { - "name": "Add Label", - "saved_on": "2019-02-12T09:23:05.746930Z", - "revision": 7, - "uuid": "e9b5b8ba-43f4-4bc2-a790-811ee1cfe392", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_12.json b/media/test_flows/migrate_to_11_12.json deleted file mode 100644 index 142c3151dbe..00000000000 --- a/media/test_flows/migrate_to_11_12.json +++ /dev/null @@ -1,197 +0,0 @@ -{ - "version": "11.12", - "site": "https://app.rapidpro.io", - "flows": [ - { 
- "entry": "456b7f83-a96b-4f17-aa0a-116a30ee0d52", - "action_sets": [ - { - "uuid": "456b7f83-a96b-4f17-aa0a-116a30ee0d52", - "x": 100, - "y": 0, - "destination": "cfea15b5-3761-41d0-ad3e-33df7a9b835a", - "actions": [ - { - "type": "channel", - "uuid": "338300e8-b433-4372-8a12-87a0f543ee8a", - "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", - "name": "Android: 1234" - } - ], - "exit_uuid": "6fb525e7-bc24-4358-acde-f2d712b28f2b" - }, - { - "uuid": "cfea15b5-3761-41d0-ad3e-33df7a9b835a", - "x": 114, - "y": 156, - "destination": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", - "actions": [ - { - "type": "reply", - "uuid": "bbdd28f0-824f-41b4-af25-5d6f9a4afefb", - "msg": { - "base": "Hey there, Yes or No?" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "902db0bc-f6a7-45d2-93b2-f47f3af1261e" - }, - { - "uuid": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", - "x": 181, - "y": 452, - "destination": "85d88c16-fafe-4b8e-8e58-a6dc6e1e0e77", - "actions": [ - { - "type": "channel", - "uuid": "437d71a2-bb17-4e71-bef7-ad6b58f0eb85", - "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", - "name": "Android: 1234" - } - ], - "exit_uuid": "cec84721-7f8f-43c3-9af2-4d5d6a15f9de" - }, - { - "uuid": "76e091fe-62a5-4786-9465-7c1fb2446694", - "x": 460, - "y": 117, - "destination": "ef9afd2d-d106-4168-a104-20ddc14f9444", - "actions": [ - { - "type": "reply", - "uuid": "f7d12748-440e-4ef1-97d4-8a9efddf4454", - "msg": { - "base": "Yo, What? Repeat Yes or No" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "f5ce0ce5-8023-4b8d-b635-762a2c18726f" - }, - { - "uuid": "9eef8677-8598-4e87-9e21-3ad245d87aee", - "x": 193, - "y": 633, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "1d3ec932-6b6f-45c2-b4d6-9a0e07721686", - "msg": { - "base": "Bye" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "839dd7c4-64b9-428f-b1d0-c386f493fc4f" - }, - { - "uuid": "85d88c16-fafe-4b8e-8e58-a6dc6e1e0e77", - "x": 173, - "y": 550, - "destination": "9eef8677-8598-4e87-9e21-3ad245d87aee", - "actions": [ - { - "type": "channel", - "uuid": "0afa546d-8308-41c2-a70c-979846108bec", - "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", - "name": "Android: 1234" - } - ], - "exit_uuid": "835a5ca9-d518-452f-865c-ca8e5cde4777" - }, - { - "uuid": "ef9afd2d-d106-4168-a104-20ddc14f9444", - "x": 501, - "y": 242, - "destination": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", - "actions": [ - { - "type": "channel", - "uuid": "28d63382-40ea-4741-ba3a-2930348fab0e", - "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", - "name": "Android: 1234" - } - ], - "exit_uuid": "be8ca9a5-0f61-4c9d-93e4-02aa6bb27afc" - } - ], - "rule_sets": [ - { - "uuid": "3bb1fb6d-f0a3-4ec7-abba-cc5fac4c6a9d", - "x": 134, - "y": 315, - "label": "Response 1", - "rules": [ - { - "uuid": "2924a1d0-be47-4f8e-aefb-f7ff3a563a43", - "category": { - "base": "Yes" - }, - "destination": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "base": "Yes" - } - }, - "label": null - }, - { - "uuid": "0107f9e4-b46c-40d7-b25b-058cac3a167e", - "category": { - "base": "No" - }, - "destination": "af882e66-9ae2-4bc1-9af7-c8c2e7373766", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "base": "No" - } - }, - "label": null - }, - { - "uuid": "ad81cc6d-1973-4eed-b97d-6edd9ebdeedc", - "category": { - "base": "Other" - }, - "destination": "76e091fe-62a5-4786-9465-7c1fb2446694", - "destination_type": 
"A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "base", - "flow_type": "M", - "version": "11.12", - "metadata": { - "name": "channels", - "saved_on": "2019-02-26T21:16:32.055957Z", - "revision": 24, - "uuid": "e5fdf453-428f-4da1-9703-0decdf7cf6f9", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_12_one_node.json b/media/test_flows/migrate_to_11_12_one_node.json deleted file mode 100644 index 9bf4eedd68d..00000000000 --- a/media/test_flows/migrate_to_11_12_one_node.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "version": "11.11", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "b0b6559d-e5bd-4deb-a4ab-9e5f04001dd4", - "action_sets": [ - { - "uuid": "b0b6559d-e5bd-4deb-a4ab-9e5f04001dd4", - "x": 100, - "y": 0, - "actions": [ - { - "type": "channel", - "uuid": "4b34b85d-da31-40c9-af65-6d76ca54b1b5", - "channel": "228cc824-6740-482a-ac2f-4f08ca449e06", - "name": "Android: 1234" - } - ], - "exit_uuid": "be37f250-f992-45e0-97fd-a3c0f57584dc" - } - ], - "rule_sets": [], - "base_language": "base", - "flow_type": "M", - "version": "11.11", - "metadata": { - "name": "channel", - "saved_on": "2019-02-28T08:55:17.275670Z", - "revision": 2, - "uuid": "8a8612bc-ff3a-45ea-b7a5-2673ce901cd9", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_12_other_org.json b/media/test_flows/migrate_to_11_12_other_org.json deleted file mode 100644 index 7deb686c5bd..00000000000 --- a/media/test_flows/migrate_to_11_12_other_org.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "version": "11.11", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "a1c00b3e-a904-4085-851d-e5e386d728b8", - "action_sets": [{ - "uuid": "a1c00b3e-a904-4085-851d-e5e386d728b8", - "x": 124, - "y": 16, - "actions": [{ - "type": "channel", - "channel": "CHANNEL-UUID", - "uuid": "84889e4d-e7e8-4415-9ad9-db27d9972558", - "name": "Not Ours" - }], - "exit_uuid": "eada09b7-7136-4f24-a34f-62ca7b404423" - }], - "rule_sets": [], - "base_language": "eng", - "flow_type": "M", - "version": "11.11", - "metadata": { - "name": "Other Org Channel", - "saved_on": "2019-02-25T20:36:14.155001Z", - "revision": 19, - "uuid": "bb8ca54b-7dcb-431f-bd86-ec3082b63469", - "expires": 43200, - "ivr_retry_failed_events": null, - "notes": [] - }, - "type": "M" -} - ] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_3.json b/media/test_flows/migrate_to_11_3.json deleted file mode 100644 index ea6a22c7fc5..00000000000 --- a/media/test_flows/migrate_to_11_3.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "version": "11.2", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", - "action_sets": [ - { - "uuid": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", - "x": 412, - "y": 814, - "destination": null, - "actions": [ - { - "type": "api", - "uuid": "9b46779a-f680-450f-8f3c-005f3b7efccd", - "webhook": "http://example.com/?thing=@flow.response_1&foo=bar", - "action": "POST", - "webhook_headers": [] - } - ], - "exit_uuid": "25d8d2ae-ea82-4214-9561-42e0bf420a93" - } - ], - "rule_sets": [ - { - "uuid": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", - "x": 100, - "y": 0, - "label": "Response 1", - "rules": [ - { - "uuid": "c799def9-345b-46f9-a838-a59191cdb181", - 
"category": { - "eng": "Success" - }, - "destination": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", - "destination_type": "R", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "1ace9344-3053-4dc2-aced-9a6e3c8a6e9d", - "category": { - "eng": "Failure" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "webhook", - "response_type": "", - "operand": "@step.value", - "config": { - "webhook": "http://example.com/webhook1", - "webhook_action": "POST", - "webhook_headers": [] - } - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.2", - "metadata": { - "name": "Migrate to 11.3 Test", - "saved_on": "2018-09-25T14:57:23.429081Z", - "revision": 97, - "uuid": "915144c5-605e-46f3-afa3-53aae2c9b8ee", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_4.json b/media/test_flows/migrate_to_11_4.json deleted file mode 100644 index 9a89c6fdf88..00000000000 --- a/media/test_flows/migrate_to_11_4.json +++ /dev/null @@ -1,168 +0,0 @@ -{ - "version": "11.3", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "019d0fab-eb51-4431-9f51-ddf207d0a744", - "action_sets": [ - { - "uuid": "92fb739f-4a99-4e29-8078-1f8fb06d127e", - "x": 241, - "y": 425, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "0382e5aa-bfda-42c8-84d3-7893aba002f8", - "msg": { - "eng": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "send", - "uuid": "b5860896-db39-4ebb-b842-d38edf46fb61", - "msg": { - "eng": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" - }, - "contacts": [ - { - "id": 277738, - "name": "05fe51bf5a434b9", - "uuid": "74eed75b-dd4f-4d24-9fc5-474052dbc086", - "urns": [ - { - "scheme": "tel", - "path": "+2353265262", - "priority": 90 - } - ] - } - ], - "groups": [], - "variables": [], - "media": {} - }, - { - "type": "email", - "uuid": "c9130ab6-d2b2-419c-8109-65b5afc47039", - "emails": [ - "test@test.com" - ], - "subject": "Testing", - "msg": "@flow.response_1.text\n@flow.response_2.text\n@flow.response_3.text\n@flow.response_3\n@(CONCATENATE(flow.response_2.text, \"blerg\"))" - } - ], - "exit_uuid": "ea5640be-105b-4277-b04e-7ad55d2c898e" - } - ], - "rule_sets": [ - { - "uuid": "019d0fab-eb51-4431-9f51-ddf207d0a744", - "x": 226, - "y": 118, - "label": "Response 1", - "rules": [ - { - "uuid": "7fd3aae5-66ca-4d8d-9923-3ef4424e7658", - "category": { - "eng": "All Responses" - }, - "destination": "fc1b062c-52c0-4c9e-87bd-1f9437d513bf", - "destination_type": "R", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "fc1b062c-52c0-4c9e-87bd-1f9437d513bf", - "x": 226, - "y": 232, - "label": "Response 2", - "rules": [ - { - "uuid": "58a4e6f6-fe44-4ac9-bf98-edffd6dfad04", - "category": { - "eng": "All Responses" - }, - "destination": "518b6f12-0192-4a75-8900-43a5dea02340", - "destination_type": "R", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": 
"expression", - "response_type": "", - "operand": "@contact.uuid", - "config": {} - }, - { - "uuid": "518b6f12-0192-4a75-8900-43a5dea02340", - "x": 226, - "y": 335, - "label": "Response 3", - "rules": [ - { - "uuid": "0d1b5fd9-bfee-4df6-9837-9883787f0661", - "category": { - "eng": "Bucket 1" - }, - "destination": "92fb739f-4a99-4e29-8078-1f8fb06d127e", - "destination_type": "A", - "test": { - "type": "between", - "min": "0", - "max": "0.5" - }, - "label": null - }, - { - "uuid": "561b7ce2-5975-4925-a76a-f4a618b11c8b", - "category": { - "eng": "Bucket 2" - }, - "destination": "92fb739f-4a99-4e29-8078-1f8fb06d127e", - "destination_type": "A", - "test": { - "type": "between", - "min": "0.5", - "max": "1" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "random", - "response_type": "", - "operand": "@(RAND())", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "F", - "version": "11.3", - "metadata": { - "name": "Migrate to 11.4", - "saved_on": "2018-06-25T21:58:04.000768Z", - "revision": 123, - "uuid": "025f1d6e-ec87-4045-8471-0a028b9483aa", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_5.json b/media/test_flows/migrate_to_11_5.json deleted file mode 100644 index 13725b4a54f..00000000000 --- a/media/test_flows/migrate_to_11_5.json +++ /dev/null @@ -1,398 +0,0 @@ -{ - "version": "11.4", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", - "action_sets": [ - { - "uuid": "35707236-5dd6-487d-bea4-6a73822852bf", - "x": 122, - "y": 458, - "destination": "51956031-9f42-475f-9d43-3ab2f87f4dd2", - "actions": [ - { - "type": "reply", - "uuid": "c82df796-9d8f-4e9b-b76c-97027fa74ef7", - "msg": { - "eng": "@flow.response_1\n@flow.response_1.value\n@flow.response_1.category\n@(upper(flow.response_1))\n@(upper(flow.response_1.category))\n\n@flow.response_2\n@flow.response_2.value\n@flow.response_2.category\n@(upper(flow.response_2))\n@(upper(flow.response_2.category))\n\n@flow.response_3\n@flow.response_3.value\n@flow.response_3.category\n@(upper(flow.response_3))\n@(upper(flow.response_3.category))" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "65af1dca-b48e-4b36-867c-2ace47038093" - }, - { - "uuid": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", - "x": 412, - "y": 814, - "destination": null, - "actions": [ - { - "type": "api", - "uuid": "9b46779a-f680-450f-8f3c-005f3b7efccd", - "webhook": "http://example.com/?thing=@flow.response_1&foo=bar", - "action": "GET", - "webhook_headers": [] - }, - { - "type": "save", - "uuid": "e0ecf2a5-0429-45ec-a9d7-e2c122274484", - "label": "Contact Name", - "field": "name", - "value": "@flow.response_3.value" - } - ], - "exit_uuid": "25d8d2ae-ea82-4214-9561-42e0bf420a93" - } - ], - "rule_sets": [ - { - "uuid": "2831f7ad-23e6-4ab3-91d9-936f14fcf35e", - "x": 100, - "y": 0, - "label": "Response 1", - "rules": [ - { - "uuid": "c799def9-345b-46f9-a838-a59191cdb181", - "category": { - "eng": "Success" - }, - "destination": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", - "destination_type": "R", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "1ace9344-3053-4dc2-aced-9a6e3c8a6e9d", - "category": { - "eng": "Failure" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "webhook", - 
"response_type": "", - "operand": "@step.value", - "config": { - "webhook": "http://example.com/webhook1", - "webhook_action": "GET", - "webhook_headers": [] - } - }, - { - "uuid": "7e0afb0a-8ca2-479f-8f72-49f8c1081d60", - "x": 103, - "y": 125, - "label": "Response 2", - "rules": [ - { - "uuid": "ce50f51d-f052-4ff1-8a9b-a79faa62dfc2", - "category": { - "eng": "Success" - }, - "destination": "5906c8f3-46f2-4319-8743-44fb26f2b109", - "destination_type": "R", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "338e6c08-3597-4d22-beef-80d27b870a93", - "category": { - "eng": "Failure" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "webhook", - "response_type": "", - "operand": "@step.value", - "config": { - "webhook": "http://example.com/webhook2", - "webhook_action": "GET", - "webhook_headers": [] - } - }, - { - "uuid": "5906c8f3-46f2-4319-8743-44fb26f2b109", - "x": 105, - "y": 243, - "label": "Response 2", - "rules": [ - { - "uuid": "6328e346-49c6-4607-a573-e8dc6e60bfcd", - "category": { - "eng": "All Responses" - }, - "destination": "728a9a97-f28e-4fb3-a96a-7a7a8d5e5a4c", - "destination_type": "R", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "expression", - "response_type": "", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "728a9a97-f28e-4fb3-a96a-7a7a8d5e5a4c", - "x": 112, - "y": 346, - "label": "Response 3", - "rules": [ - { - "uuid": "fb64dd04-8dd3-4e28-8607-468d1748a81f", - "category": { - "eng": "Success" - }, - "destination": "35707236-5dd6-487d-bea4-6a73822852bf", - "destination_type": "A", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "992c7429-221a-40f0-80be-fd6fbe858f57", - "category": { - "eng": "Failure" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "resthook", - "response_type": "", - "operand": "@step.value", - "config": { - "resthook": "test-resthook-event" - } - }, - { - "uuid": "51956031-9f42-475f-9d43-3ab2f87f4dd2", - "x": 411, - "y": 513, - "label": "Response 5", - "rules": [ - { - "uuid": "c06fb4fe-09a0-4990-b32e-e233de7edfda", - "category": { - "eng": "All Responses" - }, - "destination": "f39a6d73-57d9-4d10-9055-57446addc87a", - "destination_type": "R", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "expression", - "response_type": "", - "operand": "@(flow.response_1 & flow.response_2 & flow.response_3)", - "config": {} - }, - { - "uuid": "f39a6d73-57d9-4d10-9055-57446addc87a", - "x": 414, - "y": 625, - "label": "Response 6", - "rules": [ - { - "uuid": "820f0020-0c72-44cd-9c12-a2b05c13e470", - "category": { - "eng": "Yes" - }, - "destination": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", - "destination_type": "R", - "test": { - "type": "contains_any", - "test": { - "eng": "yes" - } - }, - "label": null - }, - { - "uuid": "8e55e70f-acf0-45a2-b7f9-2f95ccbbfc4d", - "category": { - "eng": "Matching" - }, - "destination": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", - "destination_type": "R", - "test": { - "type": "contains_any", - "test": { - "eng": "@flow.response_1" - } - }, - "label": null - }, - { - "uuid": "d1c61a49-64f5-4ff6-b17f-1f22472f829f", - "category": { - 
"eng": "Other" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "flow_field", - "response_type": "", - "operand": "@flow.response_1", - "config": {} - }, - { - "uuid": "0e0c0e1f-e4ae-4531-ba19-48300de0f86d", - "x": 489, - "y": 722, - "label": "Response 7", - "rules": [ - { - "uuid": "234fff68-780f-442f-a1c6-757131fbc213", - "category": { - "eng": "Success" - }, - "destination": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", - "destination_type": "A", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "70b79516-40a5-439c-9dee-45b242d6bb8b", - "category": { - "eng": "Failure" - }, - "destination": "ab700bd7-480b-4e34-bd59-5be7c453aa4e", - "destination_type": "A", - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "webhook", - "response_type": "", - "operand": "@step.value", - "config": { - "webhook": "http://example.com/?thing=@flow.response_1.value", - "webhook_action": "GET", - "webhook_headers": [] - } - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.4", - "metadata": { - "name": "Migrate to 11.5 Test", - "saved_on": "2018-09-25T14:57:23.429081Z", - "revision": 97, - "uuid": "915144c5-605e-46f3-afa3-53aae2c9b8ee", - "expires": 10080, - "notes": [ - { - "x": 357, - "y": 0, - "title": "New Note", - "body": "@flow.response_1" - }, - { - "x": 358, - "y": 117, - "title": "New Note", - "body": "flow.response_2" - }, - { - "x": 358, - "y": 236, - "title": "New Note", - "body": "reuses flow.response_2" - }, - { - "x": 360, - "y": 346, - "title": "New Note", - "body": "@flow.response_3" - }, - { - "x": 671, - "y": 498, - "title": "New Note", - "body": "operand should be migrated too" - }, - { - "x": 717, - "y": 608, - "title": "New Note", - "body": "rule test should be migrated" - }, - { - "x": 747, - "y": 712, - "title": "New Note", - "body": "webhook URL in config should be migrated" - }, - { - "x": 681, - "y": 830, - "title": "New Note", - "body": "webhook URL on action should be migrated" - }, - { - "x": 682, - "y": 934, - "title": "New Note", - "body": "field value should be migrated" - } - ] - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_6.json b/media/test_flows/migrate_to_11_6.json deleted file mode 100644 index 64500d8117c..00000000000 --- a/media/test_flows/migrate_to_11_6.json +++ /dev/null @@ -1,252 +0,0 @@ -{ - "version": "11.5", - "site": "https://textit.in", - "flows": [ - { - "entry": "c4462613-5936-42cc-a286-82e5f1816793", - "action_sets": [ - { - "uuid": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", - "x": 76, - "y": 99, - "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "actions": [ - { - "type": "add_group", - "uuid": "feb7a33e-bc8b-44d8-9112-bc4e910fe304", - "groups": [ - { - "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", - "name": "Cat Fanciers" - } - ] - }, - { - "type": "add_group", - "uuid": "ca82f0e0-43ca-426c-a77c-93cf297b8e7c", - "groups": [ - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - }, - { - "type": "reply", - "uuid": "d57e9e9f-ada4-4a22-99ef-b8bf3dbcdcae", - "msg": { - "eng": "You are a cat fan! Purrrrr." 
- }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "55f88a1e-73ad-4b6d-9a04-626046bbe5a8" - }, - { - "uuid": "ef389049-d2e3-4343-b91f-13ea2db5f943", - "x": 558, - "y": 94, - "destination": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "actions": [ - { - "type": "del_group", - "uuid": "cea907a8-af81-49af-92e6-f246e52179fe", - "groups": [ - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - }, - { - "type": "reply", - "uuid": "394a328f-f829-43f2-9975-fe2f27c8b786", - "msg": { - "eng": "You are not a cat fan. Hissssss." - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "9ba78afa-948e-44c5-992f-84030f2eaa6b" - }, - { - "uuid": "d21be990-5e48-4e4b-995f-c9df8f38e517", - "x": 319, - "y": 323, - "destination": "35416fea-787d-48c1-b839-76eca089ad2e", - "actions": [ - { - "type": "channel", - "uuid": "78c58574-9f91-4c27-855e-73eacc99c395", - "channel": "bd55bb31-8ed4-4f89-b903-7103aa3762be", - "name": "Telegram: TextItBot" - } - ], - "exit_uuid": "c86638a9-2688-47c9-83ec-7f10ef49de1e" - }, - { - "uuid": "35416fea-787d-48c1-b839-76eca089ad2e", - "x": 319, - "y": 468, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "30d35b8f-f439-482a-91b1-d3b1a4351071", - "msg": { - "eng": "All done." - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "send", - "uuid": "a7b6def8-d315-49bd-82e4-85887f39babe", - "msg": { - "eng": "Hey Cat Fans!" - }, - "contacts": [], - "groups": [ - { - "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", - "name": "Cat Blasts" - } - ], - "variables": [], - "media": {} - }, - { - "type": "trigger-flow", - "uuid": "540965e5-bdfe-4416-b4dd-449220b1c588", - "flow": { - "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", - "name": "Cataclysmic" - }, - "contacts": [], - "groups": [ - { - "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", - "name": "Cat Alerts" - } - ], - "variables": [] - } - ], - "exit_uuid": "f2ef5066-434d-42bc-a5cb-29c59e51432f" - } - ], - "rule_sets": [ - { - "uuid": "c4462613-5936-42cc-a286-82e5f1816793", - "x": 294, - "y": 0, - "label": "Response 1", - "rules": [ - { - "uuid": "17d69564-60c9-4a56-be8b-34e98a2ce14a", - "category": { - "eng": "Cat Facts" - }, - "destination": "eca0f1d7-59ef-4a7c-a4a9-9bbd049eb144", - "destination_type": "A", - "test": { - "type": "in_group", - "test": { - "name": "Cat Facts", - "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9" - } - }, - "label": null - }, - { - "uuid": "a9ec4d0a-2ddd-4a13-a1d2-c63ce9916a04", - "category": { - "eng": "Other" - }, - "destination": "ef389049-d2e3-4343-b91f-13ea2db5f943", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "group", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.5", - "metadata": { - "name": "Cataclysmic", - "saved_on": "2018-10-18T17:03:54.835916Z", - "revision": 49, - "uuid": "ef9603ff-3886-4e5e-8870-0f643b6098de", - "expires": 10080, - "notes": [] - } - }, - { - "entry": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", - "action_sets": [ - { - "uuid": "0429d1f9-82ed-4198-80a2-3b213aa11fd5", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "add_group", - "uuid": "11f61fc6-834e-4cbc-88ee-c834279345e6", - "groups": [ - { - "uuid": "22a48356-71e9-4ae1-9f93-4021855c0bd5", - "name": "Cat Alerts" - }, - { - "uuid": "c7bc1eef-b7aa-4959-ab90-3e33e0d3b1f9", - "name": "Cat Facts" - 
}, - { - "uuid": "47b1b36c-7736-47b9-b63a-c0ebfb610e61", - "name": "Cat Blasts" - }, - { - "uuid": "1966e54a-fc30-4a96-81ea-9b0185b8b7de", - "name": "Cat Fanciers" - }, - { - "uuid": "bc4d7100-60ac-44f0-aa78-0ec9373d2c2f", - "name": "Catnado" - } - ] - } - ], - "exit_uuid": "029a7c9d-c935-4ed1-9573-543ded29d954" - } - ], - "rule_sets": [], - "base_language": "eng", - "flow_type": "M", - "version": "11.5", - "metadata": { - "name": "Catastrophe", - "saved_on": "2018-10-18T19:03:07.702388Z", - "revision": 1, - "uuid": "d6dd96b1-d500-4c7a-9f9c-eae3f2a2a7c5", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_7.json b/media/test_flows/migrate_to_11_7.json deleted file mode 100644 index 7598f82934d..00000000000 --- a/media/test_flows/migrate_to_11_7.json +++ /dev/null @@ -1,246 +0,0 @@ -{ - "version": "11.6", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "eb59aed8-2eeb-43cd-adfc-9c44721436a2", - "action_sets": [ - { - "uuid": "eb59aed8-2eeb-43cd-adfc-9c44721436a2", - "x": 102, - "y": 0, - "destination": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", - "actions": [ - { - "type": "api", - "uuid": "82d23a8c-af4b-4a33-8d56-03139b1168cc", - "webhook": "http://example.com/hook1", - "action": "GET", - "webhook_headers": [ - { - "name": "Header1", - "value": "Value1" - } - ] - } - ], - "exit_uuid": "787517ce-9a6d-479e-bc81-c3f4dcbb3d1d" - }, - { - "uuid": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", - "x": 149, - "y": 107, - "destination": "efe05d14-7a96-4ec5-870c-5183408821ae", - "actions": [ - { - "type": "reply", - "uuid": "544fd45b-f9a9-4543-b352-06b67dc0c32c", - "msg": { - "eng": "Action before 1" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "reply", - "uuid": "252b59b0-3664-4a36-8b9f-9317e78011da", - "msg": { - "eng": "Action before 2" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "api", - "uuid": "55c868c0-f6f7-49a8-856c-809bd082ae3b", - "webhook": "http://example.com/hook2", - "action": "POST", - "webhook_headers": [] - }, - { - "type": "reply", - "uuid": "f7ec546c-9adf-4d51-ab8e-8a1cbde8d910", - "msg": { - "eng": "Action after 1" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "reply", - "uuid": "a44ec0b8-085d-4e80-b361-7529e659e5e6", - "msg": { - "eng": "Action after 2" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "32c5dba9-17d1-4d5d-a992-19c1ec6cf825" - }, - { - "uuid": "efe05d14-7a96-4ec5-870c-5183408821ae", - "x": 199, - "y": 446, - "destination": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", - "actions": [ - { - "type": "api", - "uuid": "05377f3c-d9b0-428d-ae14-219d2f3d0f9a", - "webhook": "http://example.com/hook3", - "action": "GET", - "webhook_headers": [] - }, - { - "type": "api", - "uuid": "61fadf6d-d2ba-4bbb-b312-1db3e336a661", - "webhook": "http://example.com/hook4", - "action": "GET", - "webhook_headers": [] - } - ], - "exit_uuid": "c2236afe-c3cb-43a5-9fa0-ee6cbfb92f42" - }, - { - "uuid": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", - "x": 245, - "y": 608, - "destination": "64d8b8a5-aca0-4406-b417-5827262e67e2", - "actions": [ - { - "type": "reply", - "uuid": "be4dbed8-7334-4700-a94d-50275015c048", - "msg": { - "eng": "Actionset without webhook" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "40b890ab-8fab-459f-8d5e-48d2ea57f7ce" - }, - { - "uuid": "d6da8268-0c61-4154-8659-dd073878541c", - "x": 1036, - "y": 
265, - "destination": null, - "actions": [ - { - "type": "api", - "uuid": "b8a8715b-0fb5-4dde-a1fe-4fef045bb16c", - "webhook": "http://example.com/hook5", - "action": "GET", - "webhook_headers": [] - } - ], - "exit_uuid": "15170baf-8b15-4104-990c-13635f0bafbb" - } - ], - "rule_sets": [ - { - "uuid": "64d8b8a5-aca0-4406-b417-5827262e67e2", - "x": 673, - "y": 54, - "label": "Response 1", - "rules": [ - { - "uuid": "4bc64a60-b848-4f07-bbe8-8b82e72b6dea", - "category": { - "eng": "1" - }, - "destination": "eb59aed8-2eeb-43cd-adfc-9c44721436a2", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "eng": "1" - } - }, - "label": null - }, - { - "uuid": "2faff885-6ac4-4cef-bd11-53802be22508", - "category": { - "eng": "2" - }, - "destination": "cd2d8a3e-c267-40ef-8481-37d4076a57d3", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "eng": "2" - } - }, - "label": null - }, - { - "uuid": "05efb767-1319-4f93-ba3f-8d3860a915af", - "category": { - "eng": "3" - }, - "destination": "efe05d14-7a96-4ec5-870c-5183408821ae", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "eng": "3" - } - }, - "label": null - }, - { - "uuid": "2bfbb15e-fb54-41a5-ba43-c67c219e8c57", - "category": { - "eng": "4" - }, - "destination": "b5ea564c-4acd-4ce4-aeff-37e5c73047e7", - "destination_type": "A", - "test": { - "type": "contains_any", - "test": { - "eng": "4" - } - }, - "label": null - }, - { - "uuid": "d091ea29-07b9-48b8-bc52-1de00687af1b", - "category": { - "eng": "Other" - }, - "destination": "d6da8268-0c61-4154-8659-dd073878541c", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.6", - "metadata": { - "name": "Webhook Action Migration", - "saved_on": "2018-11-05T19:21:37.062932Z", - "revision": 61, - "uuid": "c9b9d79a-93b4-41e5-8ca3-f0b09faa2457", - "expires": 10080, - "notes": [] - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_11_8.json b/media/test_flows/migrate_to_11_8.json deleted file mode 100644 index 8f51778859f..00000000000 --- a/media/test_flows/migrate_to_11_8.json +++ /dev/null @@ -1,341 +0,0 @@ -{ - "version": 11.7, - "site": null, - "flows": [ - { - "entry": "fde99613-a3e9-4f97-9e88-81ebc0ea6211", - "action_sets": [ - { - "uuid": "788064a1-fe23-4f6e-8041-200412dff55e", - "x": 389, - "y": 991, - "destination": "d8be5901-e847-4b6f-a603-51eb571718a1", - "actions": [ - { - "type": "reply", - "uuid": "fdee102d-5259-4153-8e43-0b7df1d3a1ee", - "msg": { - "base": "Thanks @extra.name, we'll be in touch ASAP about order # @extra.order." - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "email", - "uuid": "66c4a60f-3d63-4eed-bd03-c801baa0d793", - "emails": [ - "rowanseymour@gmail.com" - ], - "subject": "Order Comment: @flow.lookup: @extra.order", - "msg": "Customer @extra.name has a problem with their order @extra.order for @extra.description. 
Please look into it ASAP and call them back with the status.\n \nCustomer Comment: \"@flow.comment\"\nCustomer Name: @extra.name\nCustomer Phone: @contact.tel " - } - ], - "exit_uuid": "b193a69a-d5d9-423a-9f1f-0ad51847a075" - }, - { - "uuid": "1bdc3242-ef13-4c1b-a3b1-11554bffff7a", - "x": 612, - "y": 574, - "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", - "actions": [ - { - "type": "reply", - "uuid": "fc90459d-243c-4207-a26b-258e2c42cff3", - "msg": { - "base": "Uh oh @extra.name! Our record indicate that your order for @extra.description was cancelled on @extra.cancel_date. If you think this is in error, please reply with a comment and our orders department will get right on it!" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "7e943c3d-b560-436f-bd7e-5c52e9162254" - }, - { - "uuid": "601c7150-7a3e-40aa-8f79-92f936e17cf9", - "x": 389, - "y": 572, - "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", - "actions": [ - { - "type": "reply", - "uuid": "459ed2db-9921-4326-87a1-5157e0a9b38a", - "msg": { - "base": "Hi @extra.name. Hope you are patient because we haven't shipped your order for @extra.description yet. We expect to ship it by @extra.ship_date though. If you have any questions, just reply and our customer service department will be notified." - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "5747ab91-d20c-4fff-8246-9d29a6ef1511" - }, - { - "uuid": "f87e2df4-5cbb-4961-b3c9-41eed35f8dbe", - "x": 167, - "y": 572, - "destination": "691e8175-f6a1-45b3-b377-c8bda223e52b", - "actions": [ - { - "type": "reply", - "uuid": "661ac1e4-2f13-48b1-adcf-0ff151833a86", - "msg": { - "base": "Great news @extra.name! We shipped your order for @extra.description on @extra.ship_date and we expect it will be delivered on @extra.delivery_date. If you have any questions, just reply and our customer service department will be notified." - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "aee36df6-a421-43b9-be55-a4a298c35f86" - }, - { - "uuid": "81c3ff98-3552-4962-ab05-8f7948ebac24", - "x": 787, - "y": 99, - "destination": "659f67c6-cf6d-4d43-bd64-a50318fd5168", - "actions": [ - { - "type": "reply", - "uuid": "7645e8cd-34a1-44d0-8b11-7f4f06bd5ac7", - "msg": { - "base": "Sorry that doesn't look like a valid order number. Maybe try: CU001, CU002 or CU003?" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "b6e7b7f2-88e5-4457-ba7b-6edb9fb81d9f" - }, - { - "uuid": "fde99613-a3e9-4f97-9e88-81ebc0ea6211", - "x": 409, - "y": 0, - "destination": "659f67c6-cf6d-4d43-bd64-a50318fd5168", - "actions": [ - { - "type": "reply", - "uuid": "c007a761-85c7-48eb-9b38-8d056d1d44ee", - "msg": { - "base": "Thanks for contacting the ThriftShop order status system. Please send your order # and we'll help you in a jiffy!" 
- }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "0a300e24-c7fa-473d-b06e-2826fa25b447" - } - ], - "rule_sets": [ - { - "uuid": "691e8175-f6a1-45b3-b377-c8bda223e52b", - "x": 389, - "y": 875, - "label": "Comment", - "rules": [ - { - "uuid": "567cac39-5ee4-4dac-b29a-97dfef2a2eb1", - "category": { - "base": "All Responses" - }, - "destination": "788064a1-fe23-4f6e-8041-200412dff55e", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "O", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "659f67c6-cf6d-4d43-bd64-a50318fd5168", - "x": 356, - "y": 198, - "label": "Lookup Response", - "rules": [ - { - "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", - "category": { - "base": "All Responses" - }, - "destination": "541382fd-e897-4f77-b468-1f2c7bacf30c", - "destination_type": "R", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "C", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "d8be5901-e847-4b6f-a603-51eb571718a1", - "x": 389, - "y": 1252, - "label": "Extra Comments", - "rules": [ - { - "uuid": "bba334ec-321e-4ead-8d1d-f34d7bc983ad", - "category": { - "base": "All Responses" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "O", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "726f6b34-d6be-46fa-8061-bf1f081b15ce", - "x": 356, - "y": 398, - "label": "Lookup", - "rules": [ - { - "uuid": "d26ac82f-90dc-4f95-b105-7d3ca4effc20", - "category": { - "base": "Shipped" - }, - "destination": "f87e2df4-5cbb-4961-b3c9-41eed35f8dbe", - "destination_type": "A", - "test": { - "type": "contains", - "test": { - "base": "Shipped" - } - }, - "label": null - }, - { - "uuid": "774e6911-cb63-4700-99bc-5e16966393b8", - "category": { - "base": "Pending" - }, - "destination": "601c7150-7a3e-40aa-8f79-92f936e17cf9", - "destination_type": "A", - "test": { - "type": "contains", - "test": { - "base": "Pending" - } - }, - "label": null - }, - { - "uuid": "fee4858c-2545-435b-ae65-d9e6b8f8d106", - "category": { - "base": "Cancelled" - }, - "destination": "1bdc3242-ef13-4c1b-a3b1-11554bffff7a", - "destination_type": "A", - "test": { - "type": "contains", - "test": { - "base": "Cancelled" - } - }, - "label": null - }, - { - "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", - "category": { - "base": "Other" - }, - "destination": "81c3ff98-3552-4962-ab05-8f7948ebac24", - "destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "expression", - "response_type": "", - "operand": "@extra.status", - "config": {} - }, - { - "uuid": "541382fd-e897-4f77-b468-1f2c7bacf30c", - "x": 356, - "y": 298, - "label": "Lookup Webhook", - "rules": [ - { - "uuid": "24b3a3a5-1ce8-45d4-87e5-0fa0159a9cab", - "category": { - "base": "Success" - }, - "destination": "726f6b34-d6be-46fa-8061-bf1f081b15ce", - "destination_type": "R", - "test": { - "type": "webhook_status", - "status": "success" - }, - "label": null - }, - { - "uuid": "008f4050-7979-42d5-a2cb-d1b4f6bc144f", - "category": { - "base": "Failure" - }, - "destination": "726f6b34-d6be-46fa-8061-bf1f081b15ce", - "destination_type": "R", - "test": { - "type": "webhook_status", - "status": "failure" - }, - "label": null - } 
- ], - "finished_key": null, - "ruleset_type": "webhook", - "response_type": "", - "operand": "@step.value", - "config": { - "webhook": "https://textit.in/demo/status/", - "webhook_action": null - } - } - ], - "base_language": "base", - "flow_type": "M", - "version": "11.7", - "metadata": { - "notes": [ - { - "body": "This flow demonstrates looking up an order using a webhook and giving the user different options based on the results. After looking up the order the user has the option to send additional comments which are forwarded to customer support representatives.\n\nUse order numbers CU001, CU002 or CU003 to see the different cases in action.", - "x": 59, - "y": 0, - "title": "Using Your Own Data" - } - ], - "saved_on": "2019-01-09T18:29:40.288510Z", - "uuid": "3825c65e-5aa8-4619-8de9-963f68483cb3", - "name": "Sample Flow - Order Status Checker", - "revision": 11, - "expires": 720 - } - } - ] -} diff --git a/media/test_flows/migrate_to_11_9.json b/media/test_flows/migrate_to_11_9.json deleted file mode 100644 index b889215b9ba..00000000000 --- a/media/test_flows/migrate_to_11_9.json +++ /dev/null @@ -1,458 +0,0 @@ -{ - "version": "11.8", - "site": "https://app.rapidpro.io", - "flows": [ - { - "entry": "edea0cb4-00b9-4a53-a923-f4aa38cf18c5", - "action_sets": [ - { - "uuid": "edea0cb4-00b9-4a53-a923-f4aa38cf18c5", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "0b6745ce-6d8b-40d4-bb4f-f18f407bdcdf", - "msg": { - "base": "hi valid" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "d79e8a16-62df-4b48-aff9-fae2633f2b77" - } - ], - "rule_sets": [], - "base_language": "base", - "flow_type": "M", - "version": "11.8", - "metadata": { - "name": "Valid1", - "saved_on": "2018-12-17T12:08:54.146452Z", - "revision": 2, - "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", - "expires": 10080, - "ivr_retry_failed_events": null - } - }, - { - "entry": "d3e2b506-50cd-4c1e-9573-295bd2087258", - "action_sets": [ - { - "uuid": "d3e2b506-50cd-4c1e-9573-295bd2087258", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "d17b512e-87ed-4717-9461-bc2ffde23b77", - "msg": { - "base": "Hi flow invalid 1" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "c231d9a6-3b53-4670-ac41-247736126ffd" - } - ], - "rule_sets": [], - "base_language": "base", - "flow_type": "M", - "version": "11.8", - "metadata": { - "name": "Invalid1", - "saved_on": "2018-12-17T12:09:52.155509Z", - "revision": 2, - "uuid": "ad40071e-a665-4df3-af14-0bc0fe589244", - "expires": 10080, - "ivr_retry_failed_events": null - } - }, - { - "entry": "932b19a7-245c-4a2b-9249-66d4eb7cfdf7", - "action_sets": [ - { - "uuid": "932b19a7-245c-4a2b-9249-66d4eb7cfdf7", - "x": 100, - "y": 0, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "2f6cdd62-9b29-4597-8af0-3dd410ae46f0", - "msg": { - "base": "Hi flow invalid two" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "0035ccc2-6359-4954-bbc9-bddb90076c25" - } - ], - "rule_sets": [], - "base_language": "base", - "flow_type": "M", - "version": "11.8", - "metadata": { - "name": "Invalid2", - "saved_on": "2018-12-17T12:10:13.269437Z", - "revision": 3, - "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", - "expires": 10080, - "ivr_retry_failed_events": null - } - }, - { - "entry": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", - "action_sets": [ - { - "uuid": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", - "x": 375, - 
"y": 1, - "destination": "a7de0caa-5ab0-4edc-8fc8-33eb31f79cba", - "actions": [ - { - "type": "reply", - "uuid": "64ce02e3-8ea8-414a-a7cf-7f5d3938aa03", - "msg": { - "base": "Hi" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "33d4947d-bdda-4226-bc27-55a6b6e56b36" - }, - { - "uuid": "c9e48e85-b91c-4e2b-bb17-fb670f1559c0", - "x": 420, - "y": 622, - "destination": "861c6312-b2a8-4586-8688-6621d7065497", - "actions": [ - { - "type": "reply", - "uuid": "24b49e66-2520-4f9c-a6f7-f7a43793db53", - "msg": { - "base": "tnx" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "trigger-flow", - "uuid": "bf31a0f8-73d8-4c81-8f90-ea0d4008a212", - "flow": { - "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", - "name": "Invalid2" - }, - "contacts": [], - "groups": [ - { - "uuid": "ad17d536-0085-4e6b-abc6-222b22d57caa", - "name": "Empty" - } - ], - "variables": [] - } - ], - "exit_uuid": "c8b6b99d-1af5-4bd1-b2d8-b3e87de702e8" - }, - { - "uuid": "6adc7de8-6a84-490a-b3d3-3d1ec607d465", - "x": 580, - "y": 316, - "destination": null, - "actions": [ - { - "type": "reply", - "uuid": "09f0ddee-c27e-4397-bb3c-4a6cf35da77a", - "msg": { - "base": "tyvm" - }, - "media": {}, - "quick_replies": [], - "send_all": false - }, - { - "type": "flow", - "uuid": "95e9750f-9bf4-4ae9-aa07-5c4cde604956", - "flow": { - "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", - "name": "Invalid2" - } - } - ], - "exit_uuid": "1f5d4b7e-7ceb-47cf-91e7-94790f63c9db" - }, - { - "uuid": "ed891a32-6e6d-49b1-88d0-399d2002bce0", - "x": 323, - "y": 993, - "destination": null, - "actions": [ - { - "type": "flow", - "uuid": "70acb970-8b3a-47d0-9fb6-56c5974a582b", - "flow": { - "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", - "name": "Valid1" - } - } - ], - "exit_uuid": "b5bfc0a1-701e-4e19-ad3e-bf9d7470b241" - }, - { - "uuid": "a750fe69-167b-4ae3-af72-7aae4c2d8b1a", - "x": 598, - "y": 993, - "destination": null, - "actions": [ - { - "type": "flow", - "uuid": "9b0f11bd-fbda-4efe-a41a-8ef101412d95", - "flow": { - "uuid": "136cdab3-e9d1-458c-b6eb-766afd92b478", - "name": "Invalid2" - } - } - ], - "exit_uuid": "1231547d-8f57-4a12-82d1-b1bf3e664010" - }, - { - "uuid": "0bfb7527-c6e9-4452-b780-6755d2041144", - "x": 576, - "y": 169, - "destination": null, - "actions": [ - { - "type": "flow", - "uuid": "e8e85830-1aef-4947-af91-1a2653f3627d", - "flow": { - "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3", - "name": "Valid1" - } - } - ], - "exit_uuid": "8d2c4101-330a-49f7-a4e5-972513c1a995" - } - ], - "rule_sets": [ - { - "uuid": "a7de0caa-5ab0-4edc-8fc8-33eb31f79cba", - "x": 61, - "y": 190, - "label": "Response 1", - "rules": [ - { - "uuid": "a16424a4-95df-4839-813a-bf6bee37f735", - "category": { - "base": "1" - }, - "destination": "9baa6aaf-61bf-4686-8059-1c373a43e5a6", - "destination_type": "R", - "test": { - "type": "eq", - "test": "1" - }, - "label": null - }, - { - "uuid": "f6b45161-f1fe-475f-a4db-7eb300f26415", - "category": { - "base": "2" - }, - "destination": "c9e48e85-b91c-4e2b-bb17-fb670f1559c0", - "destination_type": "A", - "test": { - "type": "eq", - "test": "2" - }, - "label": null - }, - { - "uuid": "59bfd40b-8b94-4555-ac2e-e6883d280df2", - "category": { - "base": "3" - }, - "destination": "6adc7de8-6a84-490a-b3d3-3d1ec607d465", - "destination_type": "A", - "test": { - "type": "eq", - "test": "3" - }, - "label": null - }, - { - "uuid": "7b25509c-94c4-45c1-86cf-2995916ac825", - "category": { - "base": "Other" - }, - "destination": "0bfb7527-c6e9-4452-b780-6755d2041144", - 
"destination_type": "A", - "test": { - "type": "true" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "wait_message", - "response_type": "", - "operand": "@step.value", - "config": {} - }, - { - "uuid": "9baa6aaf-61bf-4686-8059-1c373a43e5a6", - "x": 51, - "y": 659, - "label": "Response 2", - "rules": [ - { - "uuid": "049a4d45-d50d-468a-ae61-9e55c5dda0ea", - "category": { - "base": "Completed" - }, - "destination": "54c31965-d727-4b0a-a37e-6231551343dc", - "destination_type": "R", - "test": { - "type": "subflow", - "exit_type": "completed" - }, - "label": null - }, - { - "uuid": "101dea88-83bf-4219-973b-d11de45589ae", - "category": { - "base": "Expired" - }, - "destination": "544e4ef3-4c54-4bb0-8f89-a1e098b3f030", - "destination_type": "A", - "test": { - "type": "subflow", - "exit_type": "expired" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "subflow", - "response_type": "", - "operand": "@step.value", - "config": { - "flow": { - "name": "Invalid1", - "uuid": "ad40071e-a665-4df3-af14-0bc0fe589244" - } - } - }, - { - "uuid": "54c31965-d727-4b0a-a37e-6231551343dc", - "x": 36, - "y": 875, - "label": "Response 3", - "rules": [ - { - "uuid": "6a9a30cc-0400-4148-b760-ff342d7ef496", - "category": { - "base": "Completed" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "subflow", - "exit_type": "completed" - }, - "label": null - }, - { - "uuid": "e9e0ad89-6d63-4744-ba35-8042af052a95", - "category": { - "base": "Expired" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "subflow", - "exit_type": "expired" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "subflow", - "response_type": "", - "operand": "@step.value", - "config": { - "flow": { - "name": "Valid1", - "uuid": "b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3" - } - } - }, - { - "uuid": "861c6312-b2a8-4586-8688-6621d7065497", - "x": 409, - "y": 863, - "label": "Response 4", - "rules": [ - { - "uuid": "451cb651-7a59-4d2b-bfe5-753643ad7db2", - "category": { - "base": "1" - }, - "destination": "ed891a32-6e6d-49b1-88d0-399d2002bce0", - "destination_type": "A", - "test": { - "type": "between", - "min": "0", - "max": "0.5" - }, - "label": null - }, - { - "uuid": "39c05550-91a3-4497-9595-2478b5ab6ae4", - "category": { - "base": "2" - }, - "destination": "a750fe69-167b-4ae3-af72-7aae4c2d8b1a", - "destination_type": "A", - "test": { - "type": "between", - "min": "0.5", - "max": "1" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "random", - "response_type": "", - "operand": "@(RAND())", - "config": {} - } - ], - "base_language": "base", - "flow_type": "M", - "version": "11.8", - "metadata": { - "name": "Master", - "saved_on": "2018-12-17T13:54:21.769976Z", - "revision": 56, - "uuid": "8d3f72ef-60b9-4902-b792-d664df502f3f", - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/migrate_to_9.json b/media/test_flows/migrate_to_9.json deleted file mode 100644 index e7e50ed1eb8..00000000000 --- a/media/test_flows/migrate_to_9.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "campaigns": [ - { - "events": [ - { - "event_type": "M", - "relative_to": { - "id": 1134, - "key": "next_appointment", - "label": "Next Show" - }, - "flow": { - "name": "Single Message", - "id": 2814 - }, - "offset": -1, - "delivery_hour": -1, - "message": "Hi there, your next show is @contact.next_show. 
Don't miss it!", - "id": 9959, - "unit": "H" - } - ], - "group": { - "name": "Pending Appointments", - "id": 2308 - }, - "id": 405, - "name": "Appointment Schedule" - } - ], - "version": 9, - "site": "https://app.rapidpro.io", - "flows": [ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "a04f3046-e053-444f-b018-eff019766ad9", - "uuid": "e4a03298-dd43-4afb-b185-2782fc36a006", - "actions": [ - { - "msg": { - "base": "Hi there!" - }, - "type": "reply" - }, - { - "uuid": "c756af8f-4480-4a91-875d-c0600597c0ae", - "contacts": [ - { - "id": contact_id, - "name": "Trey Anastasio" - } - ], - "groups": [], - "variables": [], - "msg": { - "base": "You're phantastic" - }, - "action": "GET", - "type": "send" - }, - { - "labels": [ - { - "name": "this label", - "id": label_id - } - ], - "type": "add_label" - }, - { - "field": "concat_test", - "type": "save", - "value": "@(CONCAT(extra.flow.divided, extra.flow.sky))", - "label": "Concat Test" - }, - { - "field": "normal_test", - "type": "save", - "value": "@extra.contact.name", - "label": "Normal Test" - } - ] - }, - { - "y": 142, - "x": 166, - "destination": null, - "uuid": "a04f3046-e053-444f-b018-eff019766ad9", - "actions": [ - { - "type": "add_group", - "groups": [ - { - "name": "Survey Audience", - "id": group_id - }, - "@(\"Phans\")", - "Survey Audience" - ] - }, - { - "type": "del_group", - "groups": [ - { - "name": "Unsatisfied Customers", - "id": group_id - } - ] - }, - { - "name": "Test flow", - "contacts": [], - "variables": [ - { - "id": "@contact.tel_e164" - } - ], - "groups": [], - "type": "trigger-flow", - "id": start_flow_id - }, - { - "type": "flow", - "name": "Parent Flow", - "id": start_flow_id - } - ] - } - ], - "version": 9, - "flow_type": "F", - "entry": "e4a03298-dd43-4afb-b185-2782fc36a006", - "rule_sets": [], - "metadata": { - "expires": 10080, - "revision": 11, - "id": previous_flow_id, - "name": "Migrate to 9", - "saved_on": "2016-06-22T15:05:12.074490Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/mixed_versions.json b/media/test_flows/mixed_versions.json index b42dbf151d4..210b5129a8b 100644 --- a/media/test_flows/mixed_versions.json +++ b/media/test_flows/mixed_versions.json @@ -1,162 +1,161 @@ { - "version": "11.12", - "site": "https://app.rapidpro.io", - "flows": [ - { - "uuid": "b4af4237-3a0d-4ee2-9ef3-01658c9215e4", - "name": "New Child", - "spec_version": "13.0.0", - "language": "eng", - "type": "messaging", - "revision": 5, - "expire_after_minutes": 10080, - "localization": {}, - "nodes": [ + "version": "11.12", + "site": "https://app.rapidpro.io", + "flows": [ { - "uuid": "54393466-b5a3-4cba-a032-920b62a377bb", - "actions": [ - { - "type": "send_msg", - "uuid": "c6a26419-37d0-47c8-9785-c99f1ce83644", - "text": "Welcome to the child flow" - }, - { - "type": "add_input_labels", - "uuid": "0d04cd6b-de35-4077-83be-7b1b8a2b056b", - "labels": [ + "uuid": "b4af4237-3a0d-4ee2-9ef3-01658c9215e4", + "name": "New Child", + "spec_version": "13.0.0", + "language": "eng", + "type": "messaging", + "revision": 5, + "expire_after_minutes": 10080, + "localization": {}, + "nodes": [ { - "uuid": "a8a6d847-0785-4f97-a8f8-3af98ed111a0", - "name": "Interesting" + "uuid": "54393466-b5a3-4cba-a032-920b62a377bb", + "actions": [ + { + "type": "send_msg", + "uuid": "c6a26419-37d0-47c8-9785-c99f1ce83644", + "text": "Welcome to the child flow" + }, + { + "type": "add_input_labels", + "uuid": "0d04cd6b-de35-4077-83be-7b1b8a2b056b", + "labels": [ + { + "uuid": 
"a8a6d847-0785-4f97-a8f8-3af98ed111a0", + "name": "Interesting" + } + ] + }, + { + "type": "add_contact_groups", + "uuid": "22cb0ec6-c915-4a9f-9256-bcab549dabe0", + "groups": [ + { + "uuid": "7faadc84-73f6-49c4-812a-d49ed1c8c1ce", + "name": "Survey Audience" + } + ] + } + ], + "exits": [ + { + "uuid": "c8b7ac23-9d47-45b8-aa34-49297de44c84" + } + ] } - ] - }, - { - "type": "add_contact_groups", - "uuid": "22cb0ec6-c915-4a9f-9256-bcab549dabe0", - "groups": [ - { - "uuid": "7faadc84-73f6-49c4-812a-d49ed1c8c1ce", - "name": "Survey Audience" + ], + "_ui": { + "nodes": { + "54393466-b5a3-4cba-a032-920b62a377bb": { + "position": { + "left": 0, + "top": 0 + }, + "type": "execute_actions" + } } - ] - } - ], - "exits": [ - { - "uuid": "c8b7ac23-9d47-45b8-aa34-49297de44c84" - } - ] - } - ], - "_ui": { - "nodes": { - "54393466-b5a3-4cba-a032-920b62a377bb": { - "position": { - "left": 0, - "top": 0 }, - "type": "execute_actions" - } - } - }, - "_dependencies": { - "groups": [ - { - "uuid": "7faadc84-73f6-49c4-812a-d49ed1c8c1ce", - "name": "Survey Audience" - } - ], - "labels": [ - { - "uuid": "a8a6d847-0785-4f97-a8f8-3af98ed111a0", - "name": "Interesting" - } - ] - }, - "_results": [], - "_waiting_exits": [] - }, - { - "entry": "ab20858a-5fcd-492a-9a9d-208d6d4d9593", - "action_sets": [ - { - "uuid": "ab20858a-5fcd-492a-9a9d-208d6d4d9593", - "x": 100, - "y": 0, - "destination": "67c3e969-fe81-43cd-9f9d-059935874379", - "actions": [ - { - "type": "reply", - "uuid": "185c454f-0617-45eb-bbf2-59d1bb8fb500", - "msg": { - "eng": "Welcome to the parent flow" - }, - "media": {}, - "quick_replies": [], - "send_all": false - } - ], - "exit_uuid": "4671b83f-8a1a-450c-8b8d-31dffc461d41" - } - ], - "rule_sets": [ - { - "uuid": "67c3e969-fe81-43cd-9f9d-059935874379", - "x": 275, - "y": 95, - "label": "Subflow", - "rules": [ - { - "uuid": "0fea6668-f1c5-4a70-a99b-290d55f15633", - "category": { - "eng": "Completed" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "subflow", - "exit_type": "completed" - }, - "label": null + "_dependencies": { + "groups": [ + { + "uuid": "7faadc84-73f6-49c4-812a-d49ed1c8c1ce", + "name": "Survey Audience" + } + ], + "labels": [ + { + "uuid": "a8a6d847-0785-4f97-a8f8-3af98ed111a0", + "name": "Interesting" + } + ] }, - { - "uuid": "e7337d15-93e2-46ce-ae51-8cc96c9f5387", - "category": { - "eng": "Expired" - }, - "destination": null, - "destination_type": null, - "test": { - "type": "subflow", - "exit_type": "expired" - }, - "label": null - } - ], - "finished_key": null, - "ruleset_type": "subflow", - "response_type": "", - "operand": "@step.value", - "config": { - "flow": { - "name": "New Child", - "uuid": "b4af4237-3a0d-4ee2-9ef3-01658c9215e4" + "_results": [] + }, + { + "entry": "ab20858a-5fcd-492a-9a9d-208d6d4d9593", + "action_sets": [ + { + "uuid": "ab20858a-5fcd-492a-9a9d-208d6d4d9593", + "x": 100, + "y": 0, + "destination": "67c3e969-fe81-43cd-9f9d-059935874379", + "actions": [ + { + "type": "reply", + "uuid": "185c454f-0617-45eb-bbf2-59d1bb8fb500", + "msg": { + "eng": "Welcome to the parent flow" + }, + "media": {}, + "quick_replies": [], + "send_all": false + } + ], + "exit_uuid": "4671b83f-8a1a-450c-8b8d-31dffc461d41" + } + ], + "rule_sets": [ + { + "uuid": "67c3e969-fe81-43cd-9f9d-059935874379", + "x": 275, + "y": 95, + "label": "Subflow", + "rules": [ + { + "uuid": "0fea6668-f1c5-4a70-a99b-290d55f15633", + "category": { + "eng": "Completed" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "subflow", + "exit_type": 
"completed" + }, + "label": null + }, + { + "uuid": "e7337d15-93e2-46ce-ae51-8cc96c9f5387", + "category": { + "eng": "Expired" + }, + "destination": null, + "destination_type": null, + "test": { + "type": "subflow", + "exit_type": "expired" + }, + "label": null + } + ], + "finished_key": null, + "ruleset_type": "subflow", + "response_type": "", + "operand": "@step.value", + "config": { + "flow": { + "name": "New Child", + "uuid": "b4af4237-3a0d-4ee2-9ef3-01658c9215e4" + } + } + } + ], + "base_language": "eng", + "flow_type": "M", + "version": "11.12", + "metadata": { + "uuid": "70212996-6bea-4229-bbd3-55e5cd607049", + "name": "Legacy Parent", + "saved_on": "2019-05-30T20:11:49.587074Z", + "revision": 5, + "expires": 10080 } - } } - ], - "base_language": "eng", - "flow_type": "M", - "version": "11.12", - "metadata": { - "uuid": "70212996-6bea-4229-bbd3-55e5cd607049", - "name": "Legacy Parent", - "saved_on": "2019-05-30T20:11:49.587074Z", - "revision": 5, - "expires": 10080 - } - } - ], - "campaigns": [], - "triggers": [] + ], + "campaigns": [], + "triggers": [] } \ No newline at end of file diff --git a/media/test_flows/multi_language_flow.json b/media/test_flows/multi_language_flow.json deleted file mode 100644 index a7a10f91c3e..00000000000 --- a/media/test_flows/multi_language_flow.json +++ /dev/null @@ -1,176 +0,0 @@ -{ - "version": 4, - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", - "uuid": "d563e7ca-aa0f-4615-ba8c-eab5e13ff4bf", - "actions": [ - { - "msg": { - "spa": "\u00a1Hola amigo! \u00bfCu\u00e1l es tu color favorito?", - "eng": "Hello friend! What is your favorite color?" - }, - "type": "reply" - } - ] - }, - { - "y": 266, - "x": 351, - "destination": null, - "uuid": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", - "actions": [ - { - "msg": { - "spa": "\u00a1Gracias! Me gusta @flow.color.", - "eng": "Thank you! I like @flow.color." - }, - "type": "reply" - }, - { - "msg": { - "eng": "This message was not translated." - }, - "type": "reply" - } - ] - }, - { - "y": 179, - "x": 683, - "destination": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", - "uuid": "6ea52610-838c-4f64-8e24-99754135da67", - "actions": [ - { - "msg": { - "spa": "Por favor, una vez m\u00e1s", - "eng": "Please try again." 
- }, - "type": "reply" - } - ] - } - ], - "last_saved": "2015-02-19T05:55:32.232993Z", - "entry": "d563e7ca-aa0f-4615-ba8c-eab5e13ff4bf", - "rule_sets": [ - { - "uuid": "c969c5ba-8595-4e2c-86d0-c2e375afe3e0", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "spa": "rojo", - "eng": "Red" - }, - "base": "Red", - "type": "contains_any" - }, - "category": { - "spa": "Rojo", - "base": "Red", - "eng": "Red" - }, - "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "de555b2c-2616-49ff-8564-409a01b0bd79" - }, - { - "test": { - "test": { - "spa": "verde", - "eng": "Green" - }, - "base": "Green", - "type": "contains_any" - }, - "category": { - "spa": "Verde", - "base": "Green", - "eng": "Green" - }, - "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "e09c7ad3-46c8-4024-9fcf-8a0d26d97d6a" - }, - { - "test": { - "test": { - "spa": "azul", - "eng": "Blue" - }, - "base": "Blue", - "type": "contains_any" - }, - "category": { - "spa": "Azul", - "base": "Blue", - "eng": "Blue" - }, - "destination": "5532bc8e-ecf8-42ad-9654-bb4b3374001e", - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "aafd9e60-4d74-40cb-a923-3501560cb5c1" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "spa": "Otro", - "base": "Other", - "eng": "Other" - }, - "destination": "6ea52610-838c-4f64-8e24-99754135da67", - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "2263684a-0354-448e-8213-c57644e91798" - } - ], - "webhook": null, - "label": "Color", - "operand": "@step.value", - "finished_key": null, - "response_type": "C", - "y": 132, - "x": 242 - } - ], - "metadata": {} - }, - "id": 1400, - "flow_type": "F", - "name": "Multi Language Flow" - } - ], - "triggers": [] -} diff --git a/media/test_flows/no_base_language_v8.json b/media/test_flows/no_base_language_v8.json deleted file mode 100644 index 18f5ccfc07d..00000000000 --- a/media/test_flows/no_base_language_v8.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": null, - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "f614e8c9-eeb6-4c94-bd07-b4bbe8a95b47", - "actions": [ - { - "type": "add_group", - "groups": [ - { - "name": "A New Group", - "id": 44899 - } - ] - }, - { - "field": "location", - "type": "save", - "value": "Seattle, WA", - "label": "Location" - }, - { - "lang": "eng", - "type": "lang", - "name": "English" - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "f614e8c9-eeb6-4c94-bd07-b4bbe8a95b47", - "rule_sets": [], - "metadata": { - "expires": 720, - "saved_on": "2015-11-19T00:30:09.477009Z", - "id": 42104, - "name": "Join New Group", - "revision": 6 - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/no_ruleset_flow.json b/media/test_flows/no_ruleset_flow.json deleted file mode 100644 index 46c1806a98c..00000000000 --- a/media/test_flows/no_ruleset_flow.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - 
"destination": null, - "uuid": "e41e7aad-de93-4cc0-ae56-d6af15ba1ac5", - "actions": [ - { - "msg": { - "eng": "Hello world" - }, - "type": "reply" - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "e41e7aad-de93-4cc0-ae56-d6af15ba1ac5", - "rule_sets": [], - "metadata": { - "expires": 10080, - "revision": 1, - "id": 41049, - "name": "No ruleset flow", - "saved_on": "2015-11-20T11:02:19.790131Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/non_localized_ruleset.json b/media/test_flows/non_localized_ruleset.json deleted file mode 100644 index a4cbc27c3e8..00000000000 --- a/media/test_flows/non_localized_ruleset.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": "eng", - "action_sets": [], - "version": 8, - "flow_type": "F", - "entry": "99696ed8-2555-4d18-ac0b-f9b9d85abf30", - "rule_sets": [ - { - "uuid": "99696ed8-2555-4d18-ac0b-f9b9d85abf30", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": "All Responses", - "uuid": "9b31bbfe-23d7-4838-806a-1a3989de3f37" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 0, - "x": 100, - "config": {} - } - ], - "metadata": { - "expires": 10080, - "revision": 1, - "id": 42135, - "name": "Empty", - "saved_on": "2015-11-19T22:31:15.972687Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/non_localized_with_language.json b/media/test_flows/non_localized_with_language.json deleted file mode 100644 index 03daa5be452..00000000000 --- a/media/test_flows/non_localized_with_language.json +++ /dev/null @@ -1,332 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 991, - "x": 389, - "destination": "7d1b7019-b611-4132-9ba4-af36cc167398", - "uuid": "49189b3e-8e2b-473f-bec2-10378f5a7c06", - "actions": [ - { - "msg": "Thanks @extra.name, we'll be in touch ASAP about order # @extra.order.", - "type": "reply" - }, - { - "msg": "Customer @extra.name has a problem with their order @extra.order for @extra.description. Please look into it ASAP and call them back with the status.\n \nCustomer Comment: \"@flow.comment\"\nCustomer Name: @extra.name\nCustomer Phone: @contact.tel ", - "type": "email", - "emails": [ - "name@domain.com" - ], - "subject": "Order Comment: @flow.lookup: @extra.order" - } - ] - }, - { - "y": 574, - "x": 612, - "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", - "uuid": "8618411e-a35e-472b-b867-3339aa46027a", - "actions": [ - { - "msg": "Uh oh @extra.name! Our record indicate that your order for @extra.description was cancelled on @extra.cancel_date. If you think this is in error, please reply with a comment and our orders department will get right on it!", - "type": "reply" - } - ] - }, - { - "y": 572, - "x": 389, - "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", - "uuid": "32bb903e-44c2-40f9-b65f-c8cda6490ee6", - "actions": [ - { - "msg": "Hi @extra.name. Hope you are patient because we haven't shipped your order for @extra.description yet. We expect to ship it by @extra.ship_date though. 
If you have any questions, just reply and our customer service department will be notified.", - "type": "reply" - } - ] - }, - { - "y": 572, - "x": 167, - "destination": "6f550596-98a2-44fb-b769-b3c529f1b963", - "uuid": "bf36a209-4e21-44ac-835a-c3d5889aa2fb", - "actions": [ - { - "msg": "Great news @extra.name! We shipped your order for @extra.description on @extra.ship_date and we expect it will be delivered on @extra.delivery_date. If you have any questions, just reply and our customer service department will be notified.", - "type": "reply" - } - ] - }, - { - "y": 99, - "x": 787, - "destination": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", - "uuid": "7f4c29e3-f022-420d-8e2f-6165c572b991", - "actions": [ - { - "msg": "Sorry that doesn't look like a valid order number. Maybe try: CU001, CU002 or CU003?", - "type": "reply" - } - ] - }, - { - "y": 0, - "x": 409, - "destination": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", - "uuid": "4f79034a-51e0-4210-99cc-17f385de4de8", - "actions": [ - { - "msg": "Thanks for contacting the ThriftShop order status system. Please send your order # and we'll help you in a jiffy!", - "type": "reply" - } - ] - }, - { - "y": 854, - "x": 776, - "destination": "2cb5adcd-31b1-4d21-a0df-c5375cea1963", - "uuid": "6f550596-98a2-44fb-b769-b3c529f1b963", - "actions": [ - { - "msg": "@flow.lookup_response", - "type": "reply" - } - ] - }, - { - "y": 1430, - "x": 233, - "destination": "ad1d5767-8dfd-4c5d-b2e8-a997adb3a276", - "uuid": "81613e37-414c-4d73-884b-4ee7ae0fd913", - "actions": [ - { - "msg": "asdf", - "type": "reply" - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "4f79034a-51e0-4210-99cc-17f385de4de8", - "rule_sets": [ - { - "uuid": "2cb5adcd-31b1-4d21-a0df-c5375cea1963", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": "All Responses", - "destination": "49189b3e-8e2b-473f-bec2-10378f5a7c06", - "uuid": "088470d7-c4a9-4dd7-8be4-d10faf02fcea", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Comment", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 955, - "x": 762, - "config": {} - }, - { - "uuid": "69c427a4-b9b6-4f67-9e35-f783b3e81bfd", - "webhook_action": null, - "rules": [ - { - "category": "All Responses", - "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", - "destination": "b3bd5abb-3f70-4af5-85eb-d07900f9cb85", - "destination_type": "R", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Lookup Responses", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 198, - "x": 356, - "config": {} - }, - { - "uuid": "7d1b7019-b611-4132-9ba4-af36cc167398", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": "All Responses", - "destination": "81613e37-414c-4d73-884b-4ee7ae0fd913", - "uuid": "124f3266-bc62-4743-b4b1-79fee0d45ad9", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Extra Comments", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 1252, - "x": 389, - "config": {} - }, - { - "uuid": "6baa1d6b-ee70-4d7c-85b3-22ed94281227", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "Shipped", - "type": "contains" - }, - "category": "Shipped", - 
"destination": "bf36a209-4e21-44ac-835a-c3d5889aa2fb", - "uuid": "bb336f83-3a5f-4a2e-ad42-757a0a79892b", - "destination_type": "A" - }, - { - "test": { - "test": "Pending", - "type": "contains" - }, - "category": "Pending", - "destination": "32bb903e-44c2-40f9-b65f-c8cda6490ee6", - "uuid": "91826255-5a81-418c-aadb-3378802a1134", - "destination_type": "A" - }, - { - "test": { - "test": "Cancelled", - "type": "contains" - }, - "category": "Cancelled", - "destination": "8618411e-a35e-472b-b867-3339aa46027a", - "uuid": "1efa73d0-e30c-4495-a5c8-724b48385839", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "7f4c29e3-f022-420d-8e2f-6165c572b991", - "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "expression", - "label": "Lookup", - "operand": "@extra.status", - "finished_key": null, - "response_type": "", - "y": 398, - "x": 356, - "config": {} - }, - { - "uuid": "b3bd5abb-3f70-4af5-85eb-d07900f9cb85", - "webhook_action": "POST", - "rules": [ - { - "category": "All Responses", - "uuid": "c85136c2-dcdd-4c4b-835d-a083ebde5e07", - "destination": "6baa1d6b-ee70-4d7c-85b3-22ed94281227", - "destination_type": "R", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": "https://api.textit.in/demo/status/", - "ruleset_type": "webhook", - "label": "Lookup Webhook", - "operand": "@extra.status", - "finished_key": null, - "response_type": "", - "y": 298, - "x": 356, - "config": {} - }, - { - "uuid": "ad1d5767-8dfd-4c5d-b2e8-a997adb3a276", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": "All Responses", - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "439c839b-f04a-4394-9b8b-be91ca0991bd" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Boo", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 1580, - "x": 362, - "config": {} - } - ], - "metadata": { - "uuid": "2ed28d6a-61cd-436a-9159-01b024992e78", - "notes": [ - { - "body": "This flow demonstrates looking up an order using a webhook and giving the user different options based on the results. After looking up the order the user has the option to send additional comments which are forwarded to customer support representatives.\n\nUse order numbers CU001, CU002 or CU003 to see the different cases in action.", - "x": 59, - "y": 0, - "title": "Using Your Own Data" - } - ], - "expires": 720, - "name": "Sample Flow - Order Status Checker", - "saved_on": "2015-11-19T19:32:17.523441Z", - "id": 42133, - "revision": 1 - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/not_fully_localized.json b/media/test_flows/not_fully_localized.json deleted file mode 100644 index b64ef4690a7..00000000000 --- a/media/test_flows/not_fully_localized.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version": 7, - "flows": [ - { - "version": 7, - "flow_type": "F", - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "127f3736-77ce-4006-9ab0-0c07cea88956", - "actions": [ - { - "msg": { - "base": "What is your favorite color?" 
- }, - "type": "reply" - } - ] - }, - ], - "last_saved": "2015-09-15T02:37:08.805578Z", - "entry": "127f3736-77ce-4006-9ab0-0c07cea88956", - "rule_sets": [], - "metadata": { - "notes": [], - "name": "Not fully localized", - "id": 35559, - "expires": 720, - "revision": 1 - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/numeric_rule_allows_variables.json b/media/test_flows/numeric_rule_allows_variables.json deleted file mode 100644 index 7b5fe83750b..00000000000 --- a/media/test_flows/numeric_rule_allows_variables.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "31065f6b-4054-4560-adac-d8f4a0ec57c7", - "uuid": "f08c61ae-8757-4b4a-924b-9e97afdf15f4", - "actions": [ - { - "msg": { - "base": "How old will you be in the next world cup?" - }, - "type": "reply" - } - ] - }, - { - "y": 370, - "x": 59, - "destination": null, - "uuid": "116b7cc1-5086-4e0d-b0ee-ea3f73d0f06f", - "actions": [ - { - "msg": { - "base": "Good count" - }, - "type": "reply" - } - ] - }, - { - "y": 358, - "x": 429, - "destination": null, - "uuid": "34ca7cb8-b899-46b3-a5d2-11dd13f89541", - "actions": [ - { - "msg": { - "base": "Try again" - }, - "type": "reply" - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "f08c61ae-8757-4b4a-924b-9e97afdf15f4", - "rule_sets": [ - { - "uuid": "31065f6b-4054-4560-adac-d8f4a0ec57c7", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "@contact.age", - "type": "gt" - }, - "category": { - "base": "> @contact.age" - }, - "destination": "116b7cc1-5086-4e0d-b0ee-ea3f73d0f06f", - "uuid": "d164c264-1f48-478c-9ffb-f7207e679ed5", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "34ca7cb8-b899-46b3-a5d2-11dd13f89541", - "uuid": "2d0e800d-ec33-4fcd-a660-3782cf65dcff", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 210, - "x": 153, - "config": {} - } - ], - "metadata": { - "expires": 10080, - "revision": 7, - "id": 41052, - "name": "Numeric rule allows variables", - "saved_on": "2015-11-30T19:29:37.385369Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/old_expressions.json b/media/test_flows/old_expressions.json deleted file mode 100644 index 2b36334bd42..00000000000 --- a/media/test_flows/old_expressions.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "version": 7, - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "a32d0ebb-57aa-452e-bd8d-ae5febee4440", - "uuid": "a26285b1-134b-421b-9853-af0f26d13777", - "actions": [ - { - "msg": { - "eng": "Hi @contact.name|upper_case. Today is =(date.now)" - }, - "type": "reply" - } - ] - }, - { - "y": 350, - "x": 164, - "destination": null, - "uuid": "054d9e01-8e68-4f6d-9cf3-44407256670e", - "actions": [ - { - "type": "add_group", - "groups": [ - "=flow.response_1.category" - ] - }, - { - "msg": { - "eng": "Was @contact.name|lower_case|title_case." 
- }, - "variables": [ - { - "id": "=flow.response_1.category" - } - ], - "type": "send", - "groups": [], - "contacts": [] - } - ] - } - ], - "last_saved": "2015-09-23T07:54:10.928652Z", - "entry": "a26285b1-134b-421b-9853-af0f26d13777", - "rule_sets": [ - { - "uuid": "a32d0ebb-57aa-452e-bd8d-ae5febee4440", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "028c71a3-0696-4d98-8ff3-0dc700811124", - "uuid": "bf879f78-aff8-4c64-9326-e92f677af5cf", - "destination_type": "R" - } - ], - "webhook": "http://example.com/query.php?contact=@contact.name|upper_case", - "ruleset_type": "webhook", - "label": "Response 1", - "operand": "=(step.value)", - "finished_key": null, - "response_type": "", - "y": 134, - "x": 237, - "config": {} - }, - { - "uuid": "028c71a3-0696-4d98-8ff3-0dc700811124", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "054d9e01-8e68-4f6d-9cf3-44407256670e", - "uuid": "35ba932c-d45a-4cf5-bd0b-41fd9b80cc27", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "expression", - "label": "Response 2", - "operand": "@step.value|time_delta:\"3\"", - "finished_key": null, - "response_type": "", - "y": 240, - "x": 203, - "config": {} - } - ], - "type": "F", - "metadata": {} - }, - "expires": 10080, - "id": 31427, - "flow_type": "F", - "name": "Old Expressions" - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/parent.json b/media/test_flows/parent.json deleted file mode 100644 index a413e7369ce..00000000000 --- a/media/test_flows/parent.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "entry": "2f0e3397-3b9e-4593-b1d2-04ecfddb4f8f", - "rule_sets": [], - "action_sets": [ - { - "y": 1, - "x": 127, - "destination": null, - "uuid": "2f0e3397-3b9e-4593-b1d2-04ecfddb4f8f", - "actions": [ - { - "uuid": "59d5ba21-e61d-4bb7-a898-33bb2164987e", - "value": "None", - "label": "Campaign Date", - "field": "campaign_date", - "action": "GET", - "type": "save" - }, - { - "uuid": "40b35b95-fdbc-4ba4-b91e-c3c1911c1f3c", - "type": "flow", - "name": "Child Flow", - "id": CHILD_ID - }, - { - "action": "GET", - "type": "add_group", - "uuid": "4ea70294-ca92-478c-b0f4-ffc4fd858412", - "groups": [ - { - "name": "Campaign" - } - ] - }, - { - "msg": "Added to campaign.", - "action": "GET", - "type": "reply", - "uuid": "8a267e99-1b75-4e6d-bafc-9bc65629ad0a" - } - ] - } - ], - "last_saved": "2014-11-20T21:14:51.848399Z", - "metadata": {} - }, - "flow_type": "F", - "name": "Parent", - "id": 2000 - } - ], - "triggers": [] -} diff --git a/media/test_flows/pick_a_number.json b/media/test_flows/pick_a_number.json deleted file mode 100644 index 8db008735f1..00000000000 --- a/media/test_flows/pick_a_number.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "rule_sets": [ - { - "y": 106, - "x": 100, - "response_type": "C", - "rules": [ - { - "test": { - "max": "10", - "type": "between", - "min": "1" - }, - "destination": "9a8ba8b2-8c80-4635-9f5d-015c15fdc44a", - "uuid": "41418f9d-73e5-43b8-a341-3f7af70e13c1" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": null, - "uuid": "e53c2616-7b8d-4821-968a-4488e9980454" - } - ], - 
"uuid": "06bb3899-5de4-4cbc-ad5f-70b9634d80c4", - "label": "number" - }, - { - "y": 300, - "x": 300, - "response_type": "C", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": "All Responses", - "destination": "06bb3899-5de4-4cbc-ad5f-70b9634d80c4", - "uuid": "9df37f4c-73ca-4876-8490-35f984486df6" - } - ], - "uuid": "c1a5c78e-560b-45b1-83b1-1dad9ce57a06", - "label": "passive", - "operand": "@contact.name" - } - ], - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "06bb3899-5de4-4cbc-ad5f-70b9634d80c4", - "uuid": "2f2adf23-87db-41d3-9436-afe48ab5403c", - "actions": [ - { - "msg": "Pick a number between 1-10.", - "type": "reply" - } - ] - }, - { - "y": 228, - "x": 118, - "destination": null, - "uuid": "9a8ba8b2-8c80-4635-9f5d-015c15fdc44a", - "actions": [ - { - "msg": "You picked @flow.number!", - "type": "reply" - } - ] - } - ] - }, - "flow_type": "F", - "name": "Pick a Number", - "id": 2100 - } - ], - "triggers": [] -} diff --git a/media/test_flows/preprocess.json b/media/test_flows/preprocess.json deleted file mode 100644 index d8616b5068a..00000000000 --- a/media/test_flows/preprocess.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "rule_sets": [ - { - "y": 106, - "x": 100, - "webhook": "http://preprocessor.com/endpoint.php", - "response_type": "N", - "rules": [ - { - "test": { - "max": "10", - "type": "between", - "min": "1" - }, - "destination": "9a8ba8b2-8c80-4635-9f5d-015c15fdc44a", - "uuid": "41418f9d-73e5-43b8-a341-3f7af70e13c1" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": null, - "uuid": "e53c2616-7b8d-4821-968a-4488e9980454" - } - ], - "uuid": "06bb3899-5de4-4cbc-ad5f-70b9634d80c4", - "label": "number" - } - ], - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "06bb3899-5de4-4cbc-ad5f-70b9634d80c4", - "uuid": "2f2adf23-87db-41d3-9436-afe48ab5403c", - "actions": [ - { - "msg": "Pick a number between 1-10.", - "type": "reply" - } - ] - }, - { - "y": 228, - "x": 118, - "destination": null, - "uuid": "9a8ba8b2-8c80-4635-9f5d-015c15fdc44a", - "actions": [ - { - "msg": "You picked @flow.number!", - "type": "reply" - } - ] - } - ] - }, - "flow_type": "F", - "name": "Preprocess", - "id": 2200 - } - ], - "triggers": [] -} diff --git a/media/test_flows/quick_replies.json b/media/test_flows/quick_replies.json deleted file mode 100644 index c48ef44d30b..00000000000 --- a/media/test_flows/quick_replies.json +++ /dev/null @@ -1,156 +0,0 @@ -{ - "campaigns": [], - "version": 10, - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 44, - "x": 223, - "destination": "4297b822-734e-44cb-a1c9-2e20bc2cdb19", - "uuid": "163462f8-8a82-49df-ab8a-4eee3f7b9feb", - "actions": [ - { - "msg": { - "por": "Voc\u00ea gosta de jogar futebol?", - "eng": "Do you like to play football?" - }, - "media": {}, - "send_all": false, - "type": "reply", - "quick_replies": [ - { - "por": "Sim", - "eng": "Yes" - }, - { - "eng": "No" - } - ] - } - ] - }, - { - "y": 319, - "x": 262, - "destination": null, - "uuid": "a26a36b9-d4d7-4355-aad3-2fc86e84a7f1", - "actions": [ - { - "msg": { - "eng": "Good!" 
- }, - "media": {}, - "send_all": false, - "type": "reply", - "quick_replies": [] - } - ] - }, - { - "y": 318, - "x": 509, - "destination": null, - "uuid": "cdf0b558-6a44-44c9-8bee-6a6b6e8ad9c4", - "actions": [ - { - "msg": { - "eng": ":(" - }, - "media": {}, - "send_all": false, - "type": "reply", - "quick_replies": [] - } - ] - }, - { - "y": 173, - "x": 749, - "destination": "4297b822-734e-44cb-a1c9-2e20bc2cdb19", - "uuid": "4b7366eb-6099-4135-9a00-72492e6fdb8d", - "actions": [ - { - "msg": { - "eng": "Sorry, I don't understand." - }, - "media": {}, - "send_all": false, - "type": "reply", - "quick_replies": [] - } - ] - } - ], - "version": 10, - "flow_type": "F", - "entry": "163462f8-8a82-49df-ab8a-4eee3f7b9feb", - "rule_sets": [ - { - "uuid": "4297b822-734e-44cb-a1c9-2e20bc2cdb19", - "rules": [ - { - "category": { - "eng": "Yes" - }, - "uuid": "e8014483-e9ee-4384-85ea-f88c67ddf494", - "destination": "a26a36b9-d4d7-4355-aad3-2fc86e84a7f1", - "label": null, - "destination_type": "A", - "test": { - "test": { - "eng": "Yes, Sim" - }, - "type": "contains_any" - } - }, - { - "category": { - "eng": "No" - }, - "uuid": "a0b594ae-491f-4f2c-93af-0f158f69a5d8", - "destination": "cdf0b558-6a44-44c9-8bee-6a6b6e8ad9c4", - "label": null, - "destination_type": "A", - "test": { - "test": { - "eng": "No" - }, - "type": "contains_any" - } - }, - { - "category": { - "eng": "Other" - }, - "uuid": "b27d6067-f3fd-4b9b-a79a-127791987ec5", - "destination": "4b7366eb-6099-4135-9a00-72492e6fdb8d", - "label": null, - "destination_type": "A", - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 160, - "x": 381, - "config": {} - } - ], - "metadata": { - "expires": 10080, - "revision": 106, - "uuid": "711489ef-87c9-4fbc-8e6e-92af2b671fc4", - "name": "Quick Replies", - "saved_on": "2017-10-13T13:09:02.747795Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/random_word.json b/media/test_flows/random_word.json deleted file mode 100644 index 45cd0b0e5a2..00000000000 --- a/media/test_flows/random_word.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "fbb21cb8-eaf0-45fc-a5e3-31c5f5c1d55e", - "uuid": "8731e312-cdf1-412c-8a7d-6cc603de9cf9", - "actions": [ - { - "msg": { - "eng": "Write me a random word." 
- }, - "type": "reply" - } - ] - }, - { - "y": 406, - "x": 228, - "destination": null, - "uuid": "395f0a8e-b4fa-4a73-af33-98134505a3d7", - "actions": [ - { - "msg": { - "eng": "Thank you" - }, - "type": "reply" - } - ] - } - ], - "last_saved": "2014-08-11T15:08:22.724512Z", - "entry": "8731e312-cdf1-412c-8a7d-6cc603de9cf9", - "rule_sets": [ - { - "uuid": "fbb21cb8-eaf0-45fc-a5e3-31c5f5c1d55e", - "response_type": "O", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "395f0a8e-b4fa-4a73-af33-98134505a3d7", - "uuid": "3ec97b15-cfd8-4500-947f-56cae2441c99" - } - ], - "label": "Random", - "operand": "@step.value", - "y": 239, - "x": 136 - } - ], - "metadata": { - "notes": [] - } - }, - "flow_type": "F", - "name": "Random Word", - "id": 2300 - } - ], - "triggers": [] -} diff --git a/media/test_flows/rules_first.json b/media/test_flows/rules_first.json deleted file mode 100644 index 35d52c9b652..00000000000 --- a/media/test_flows/rules_first.json +++ /dev/null @@ -1,120 +0,0 @@ -{ - "version": 7, - "flows": [ - { - "version": 7, - "base_language": "base", - "action_sets": [ - { - "y": 161, - "x": 114, - "destination": null, - "uuid": "0fa491db-a447-4940-a7c8-c682f0e9ae3b", - "actions": [ - { - "msg": { - "base": "You've got to be kitten me" - }, - "type": "reply" - } - ] - }, - { - "y": 160, - "x": 342, - "destination": null, - "uuid": "29825823-69e3-47d7-a139-90c4851de0a3", - "actions": [ - { - "msg": { - "base": "Raise the woof!" - }, - "type": "reply" - } - ] - }, - { - "y": 100, - "x": 602, - "destination": "737527ae-ade5-4b55-944a-94a67b79cec5", - "uuid": "8e89b350-4b96-480c-b4e5-31f38f40bfe5", - "actions": [ - { - "msg": { - "base": "Is that even an animal?" - }, - "type": "reply" - } - ] - } - ], - "last_saved": "2015-09-15T02:38:14.494272Z", - "entry": "737527ae-ade5-4b55-944a-94a67b79cec5", - "rule_sets": [ - { - "uuid": "737527ae-ade5-4b55-944a-94a67b79cec5", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "base": "Cats" - }, - "type": "contains_any" - }, - "category": { - "base": "Cats" - }, - "destination": "0fa491db-a447-4940-a7c8-c682f0e9ae3b", - "uuid": "be41b657-cbfa-433a-9ffe-4fbcaf7fe15e", - "destination_type": "A" - }, - { - "test": { - "test": { - "base": "Dogs" - }, - "type": "contains_any" - }, - "category": { - "base": "Dogs" - }, - "destination": "29825823-69e3-47d7-a139-90c4851de0a3", - "uuid": "f1b8745b-beb3-4431-9e8e-01a214f20e3e", - "destination_type": "A" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": "8e89b350-4b96-480c-b4e5-31f38f40bfe5", - "uuid": "a329fbaa-49cf-4a5e-8e12-8df801344715", - "destination_type": "A" - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Animal", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 0, - "x": 260, - "config": {} - } - ], - "flow_type": "F", - "metadata": { - "notes": [], - "expires": 720, - "id": 35560, - "name": "Rules First" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/ruleset_loop.json b/media/test_flows/ruleset_loop.json deleted file mode 100644 index 71a1dc327e4..00000000000 --- a/media/test_flows/ruleset_loop.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "campaigns": [], - "version": 4, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "base_language": "ind", - "action_sets": [ - { - "y": 189, - "x": 157, - "destination": null, - "uuid": 
"c1474749-cfcb-4911-a93f-31ba67b64d57", - "actions": [ - { - "type": "flow", - "name": "Flow 2", - "id": 27668 - } - ] - } - ], - "last_saved": "2015-03-25T23:18:57.977877Z", - "entry": "e41fa402-0946-451f-8971-ac6adb6a0cc6", - "rule_sets": [ - { - "uuid": "e41fa402-0946-451f-8971-ac6adb6a0cc6", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": { - "ind": "awesome" - }, - "type": "contains_any" - }, - "category": { - "ind": "Awesome", - "base": "Awesome" - }, - "destination": null, - "uuid": "d0f480bb-a1e4-4aed-bbfe-04bb34ada1f2" - }, - { - "test": { - "type": "true" - }, - "category": { - "ind": "Other", - "base": "Other" - }, - "destination": "c1474749-cfcb-4911-a93f-31ba67b64d57", - "uuid": "5a2aa691-9103-4cf0-a2af-7df9be3658b7" - } - ], - "webhook": null, - "label": "Response 1", - "operand": "@contact.name", - "finished_key": null, - "response_type": "C", - "y": 0, - "x": 110 - } - ], - "metadata": { - "notes": [] - } - }, - "id": 27667, - "flow_type": "F", - "name": "Flow 1" - }, - { - "definition": { - "base_language": "ind", - "action_sets": [ - { - "y": 241, - "x": 180, - "destination": null, - "uuid": "e32f191b-d667-4a9e-9820-6faaf98d9a27", - "actions": [ - { - "type": "flow", - "name": "Flow 1", - "id": 27667 - } - ] - } - ], - "last_saved": "2015-03-25T23:18:28.682837Z", - "entry": "1b155cb4-1457-4430-899e-72a21a1843e8", - "rule_sets": [ - { - "uuid": "1b155cb4-1457-4430-899e-72a21a1843e8", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": { - "ind": "awesome" - }, - "type": "contains_any" - }, - "category": { - "ind": "Awesome", - "base": "Awesome" - }, - "destination": null, - "uuid": "024aea08-84e2-4ce7-9032-422bdf9d4a79" - }, - { - "test": { - "type": "true" - }, - "category": { - "ind": "Other", - "base": "All Responses" - }, - "destination": "e32f191b-d667-4a9e-9820-6faaf98d9a27", - "uuid": "1469f833-3b09-4a25-bccf-fb5c05d50876" - } - ], - "webhook": null, - "label": "Response 1", - "operand": "@contact.name", - "finished_key": null, - "response_type": "C", - "y": 0, - "x": 126 - } - ], - "metadata": {} - }, - "id": 2700, - "flow_type": "F", - "name": "Flow 2" - } - ], - "triggers": [] -} diff --git a/media/test_flows/send_all.json b/media/test_flows/send_all.json deleted file mode 100644 index 292a1991240..00000000000 --- a/media/test_flows/send_all.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "campaigns": [], - "version": 10, - "site": "https://app.rapidpro.io", - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "14fb87aa-22cc-4e9a-a8d6-dd4640426bed", - "actions": [ - { - "msg": { - "eng": "Hey, how are you?" 
- }, - "media": {}, - "send_all": true, - "type": "reply" - } - ] - } - ], - "version": 10, - "flow_type": "F", - "entry": "14fb87aa-22cc-4e9a-a8d6-dd4640426bed", - "rule_sets": [], - "metadata": { - "expires": 10080, - "revision": 1, - "uuid": "4b8089fb-bb10-4cbc-800f-a0aa8bb21713", - "name": "Send All", - "saved_on": "2017-03-16T14:53:04.119831Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/single_message_bad_localization.json b/media/test_flows/single_message_bad_localization.json deleted file mode 100644 index 9ac97190f1d..00000000000 --- a/media/test_flows/single_message_bad_localization.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "version":10, - "flows":[ - { - "base_language":"eng", - "rule_sets":[ - - ], - "action_sets":[ - { - "y":0, - "x":100, - "uuid":"37fe93f8-edf5-40f3-b029-3b391fa528d0", - "actions":[ - { - "msg":"Campaign Message 12", - "type":"reply", - "uuid":"9bdb1aab-e42e-4585-8395-6504c4a683ed" - } - ] - } - ], - "entry":"37fe93f8-edf5-40f3-b029-3b391fa528d0" - } - ], - "triggers":[ - - ] -} \ No newline at end of file diff --git a/media/test_flows/sms_form.json b/media/test_flows/sms_form.json deleted file mode 100644 index 3b7767fdfe0..00000000000 --- a/media/test_flows/sms_form.json +++ /dev/null @@ -1,299 +0,0 @@ -{ - "version": 5, - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 119, - "destination": "d0e01dde-dcd2-43e9-8f8e-ae1699a80395", - "uuid": "2e6aaa75-ffb7-4c48-baee-57e4149e452c", - "actions": [ - { - "msg": { - "eng": "What is your age, sex, location? Separate your responses with a space. For example \"15 f seattle\"." - }, - "type": "reply" - } - ] - }, - { - "y": 265, - "x": 904, - "destination": "d0e01dde-dcd2-43e9-8f8e-ae1699a80395", - "uuid": "9f1c79ae-581a-45ff-a9ea-4096f8231aad", - "actions": [ - { - "msg": { - "eng": "Sorry, @flow.age doesn't look like a valid age, please try again." - }, - "type": "reply" - } - ] - }, - { - "y": 414, - "x": 831, - "destination": "d0e01dde-dcd2-43e9-8f8e-ae1699a80395", - "uuid": "1cc063a7-afea-460d-b8a0-c8c2a2e37e35", - "actions": [ - { - "msg": { - "eng": "Sorry, @flow.gender doesn't look like a valid gender. Try again." - }, - "type": "reply" - } - ] - }, - { - "y": 571, - "x": 735, - "destination": "d0e01dde-dcd2-43e9-8f8e-ae1699a80395", - "uuid": "6be94ef5-bffc-4864-bd71-8e7cd87d7178", - "actions": [ - { - "msg": { - "eng": "I don't know the location @flow.location. Please try again." - }, - "type": "reply" - } - ] - }, - { - "y": 234, - "x": 116, - "destination": null, - "uuid": "0b18b474-00ab-40a0-af25-5d7c91aa64d7", - "actions": [ - { - "msg": { - "eng": "Thanks for your submission. 
We have that as:\n\n@flow.age / @flow.gender / @flow.location" - }, - "type": "reply" - } - ] - } - ], - "last_saved": "2015-08-05T19:02:29.296446Z", - "entry": "2e6aaa75-ffb7-4c48-baee-57e4149e452c", - "rule_sets": [ - { - "uuid": "d0e01dde-dcd2-43e9-8f8e-ae1699a80395", - "webhook_action": null, - "rules": [ - { - "category": { - "base": "All Responses", - "eng": "All Responses" - }, - "uuid": "bb0c523f-d216-4bf3-8794-664a9d9b3ccb", - "destination": "b7563d6f-279a-4b19-bff6-0ee3ccfa5d5f", - "destination_type": "R", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": null, - "ruleset_type": "wait_message", - "label": "Message Form", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 117, - "x": 459, - "config": {} - }, - { - "uuid": "b7563d6f-279a-4b19-bff6-0ee3ccfa5d5f", - "webhook_action": null, - "rules": [ - { - "test": { - "max": "100", - "type": "between", - "min": "0" - }, - "category": { - "base": "0 - 100", - "eng": "0 - 100" - }, - "destination": "eb669471-fadf-489b-9ce6-c10bb4add673", - "uuid": "a9c7276e-2f5d-4e6d-9efd-2b8d39c3ec50", - "destination_type": "R" - }, - { - "category": { - "base": "Other", - "eng": "Other" - }, - "uuid": "83ce1500-c6e6-4eb1-8feb-76cd439c6e36", - "destination": "9f1c79ae-581a-45ff-a9ea-4096f8231aad", - "destination_type": "A", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": null, - "ruleset_type": "form_field", - "label": "Age", - "operand": "@flow.message_form", - "finished_key": null, - "response_type": "", - "y": 226, - "x": 460, - "config": { - "field_delimiter": " ", - "field_index": 0 - } - }, - { - "uuid": "eb669471-fadf-489b-9ce6-c10bb4add673", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "eng": "Male m" - }, - "base": "Male m", - "type": "contains_any" - }, - "category": { - "base": "Male", - "eng": "Male" - }, - "destination": "3d8c9a32-fe04-4b95-9b2c-c66dd4ff2b24", - "uuid": "bda1d6b0-2b8d-4ddb-a888-3e41b3243a0f", - "destination_type": "R" - }, - { - "test": { - "test": { - "eng": "Female f" - }, - "base": "Female f", - "type": "contains_any" - }, - "category": { - "base": "Female", - "eng": "Female" - }, - "destination": "3d8c9a32-fe04-4b95-9b2c-c66dd4ff2b24", - "uuid": "4b8421ab-209f-4638-a267-82c4f83c73b2", - "destination_type": "R" - }, - { - "category": { - "base": "Other", - "eng": "Other" - }, - "uuid": "b61d7b97-21ab-4df5-a475-d16122aba572", - "destination": "1cc063a7-afea-460d-b8a0-c8c2a2e37e35", - "destination_type": "A", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": null, - "ruleset_type": "form_field", - "label": "Gender", - "operand": "@flow.message_form", - "finished_key": null, - "response_type": "", - "y": 344, - "x": 385, - "config": { - "field_delimiter": " ", - "field_index": 1 - } - }, - { - "uuid": "3d8c9a32-fe04-4b95-9b2c-c66dd4ff2b24", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "eng": "seattle chicago miami" - }, - "base": "seattle chicago miami", - "type": "contains_any" - }, - "category": { - "base": "Valid", - "eng": "Valid" - }, - "destination": "0b18b474-00ab-40a0-af25-5d7c91aa64d7", - "uuid": "1b36cb64-b0ce-43cc-9b50-8f45f29c9643", - 
"destination_type": "A" - }, - { - "category": { - "base": "Other", - "eng": "Other" - }, - "uuid": "b9419b3c-0cd0-4956-93ac-b6fd0da2964a", - "destination": "6be94ef5-bffc-4864-bd71-8e7cd87d7178", - "destination_type": "A", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": null, - "ruleset_type": "form_field", - "label": "Location", - "operand": "@flow.message_form", - "finished_key": null, - "response_type": "", - "y": 486, - "x": 366, - "config": { - "field_delimiter": " ", - "field_index": 2 - } - } - ], - "metadata": {} - }, - "expires": 10080, - "id": 34393, - "flow_type": "F", - "name": "SMS Form" - } - ], - "triggers": [] -} diff --git a/media/test_flows/start_missing_flow.json b/media/test_flows/start_missing_flow.json deleted file mode 100644 index 3dc5f4ee8bb..00000000000 --- a/media/test_flows/start_missing_flow.json +++ /dev/null @@ -1,164 +0,0 @@ -{ - "campaigns": [], - "version": 4, - "site": "http://textit.in", - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "10cca2b1-f587-410b-b07d-10ef75df3590", - "uuid": "3839c698-832f-4584-88c8-f57bb1a6ef5a", - "actions": [ - { - "msg": { - "eng": "Hi there, would you like to start a flow?" - }, - "type": "reply" - } - ] - }, - { - "y": 160, - "x": 90, - "destination": null, - "uuid": "418d4ec8-976e-4f4e-aea8-28147bb93ae1", - "actions": [ - { - "type": "flow", - "name": "Missing Flow", - "id": 27122 - } - ] - }, - { - "y": 233, - "x": 395, - "destination": null, - "uuid": "6e8d145c-1b20-477c-a839-f703eeafe1fa", - "actions": [ - { - "name": "Missing Flow", - "contacts": [], - "variables": [ - { - "id": "@step.contact.tel" - } - ], - "groups": [], - "type": "trigger-flow", - "id": 27122 - } - ] - }, - { - "y": 145, - "x": 731, - "destination": null, - "uuid": "a56641a9-c62c-4361-8960-fa2a03b5757a", - "actions": [ - { - "msg": { - "eng": "This actionset should stay." 
- }, - "type": "reply" - }, - { - "type": "flow", - "name": "Missing Flow", - "id": 27122 - } - ] - } - ], - "last_saved": "2015-03-16T18:04:39.520660Z", - "entry": "3839c698-832f-4584-88c8-f57bb1a6ef5a", - "rule_sets": [ - { - "uuid": "10cca2b1-f587-410b-b07d-10ef75df3590", - "webhook_action": null, - "rules": [ - { - "test": { - "test": { - "eng": "Yes" - }, - "base": "Yes", - "type": "contains_any" - }, - "category": { - "base": "Yes", - "eng": "Yes" - }, - "destination": "418d4ec8-976e-4f4e-aea8-28147bb93ae1", - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "53de7473-1439-40fa-9c08-25a609264416" - }, - { - "test": { - "test": { - "eng": "No" - }, - "base": "No", - "type": "contains_any" - }, - "category": { - "base": "No", - "eng": "No" - }, - "destination": "6e8d145c-1b20-477c-a839-f703eeafe1fa", - "config": { - "type": "contains_any", - "verbose_name": "has any of these words", - "name": "Contains any", - "localized": true, - "operands": 1 - }, - "uuid": "dda639b2-f775-47c2-9f4d-fa5e35f79839" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses", - "eng": "Other" - }, - "destination": "a56641a9-c62c-4361-8960-fa2a03b5757a", - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - }, - "uuid": "942d62cd-9f56-4d06-bde0-6816989a41f0" - } - ], - "webhook": null, - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "C", - "y": 77, - "x": 361 - } - ], - "metadata": {} - }, - "id": 2800, - "flow_type": "F", - "name": "Start Missing Flow" - } - ], - "triggers": [] -} diff --git a/media/test_flows/start_missing_flow_from_actionset.json b/media/test_flows/start_missing_flow_from_actionset.json deleted file mode 100644 index 6e122c2eba2..00000000000 --- a/media/test_flows/start_missing_flow_from_actionset.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "campaigns": [], - "version": 6, - "site": "https://textit.in", - "flows": [ - { - "definition": { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": "53ab0927-f33b-46ab-a32c-50cd144cc9e7", - "uuid": "68df3ab0-a39d-48b1-81c6-72cc73c86f2f", - "actions": [ - { - "msg": { - "eng": "This is the first message." 
- }, - "type": "reply" - } - ] - }, - { - "y": 126, - "x": 272, - "destination": null, - "uuid": "53ab0927-f33b-46ab-a32c-50cd144cc9e7", - "actions": [ - { - "type": "flow", - "name": "Missing Flow", - "id": 35582 - } - ] - } - ], - "last_saved": "2015-10-12T21:21:09.106022Z", - "entry": "68df3ab0-a39d-48b1-81c6-72cc73c86f2f", - "rule_sets": [], - "metadata": {} - }, - "expires": 10080, - "id": 35583, - "flow_type": "F", - "name": "Start Missing Flow" - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/substitution.json b/media/test_flows/substitution.json deleted file mode 100644 index 995b40c8f9e..00000000000 --- a/media/test_flows/substitution.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "rule_sets": [ - { - "uuid": "6c67aed5-a7ac-472f-903e-4eb1d43f913e", - "response_type": "C", - "rules": [ - { - "test": { - "type": "phone" - }, - "category": "phone", - "destination": "96c41cd1-b177-4e4e-b1bc-3359588be10b", - "uuid": "50f988f0-8401-4d24-82c4-165c474e9cca" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": "Other", - "destination": "77663a2e-cb80-46dd-9fed-944514301bf4", - "uuid": "b83a7dcc-122e-4164-8334-23e5837e0bfe" - } - ], - "label": "Phone", - "operand": "@step.value", - "y": 207, - "x": 276 - } - ], - "entry": "632bd152-98c6-4b83-8a5d-0f9343fcf884", - "action_sets": [ - { - "y": 351, - "x": 175, - "destination": null, - "uuid": "96c41cd1-b177-4e4e-b1bc-3359588be10b", - "actions": [ - { - "msg": "Thanks, you typed @flow.phone", - "type": "reply" - }, - { - "msg": "Hi from @step.contact! Your phone is @contact.tel.", - "variables": [ - { - "id": "@flow.phone", - "name": "@flow.phone" - } - ], - "type": "send", - "groups": [], - "contacts": [] - } - ] - }, - { - "y": 309, - "x": 576, - "destination": "6c67aed5-a7ac-472f-903e-4eb1d43f913e", - "uuid": "77663a2e-cb80-46dd-9fed-944514301bf4", - "actions": [ - { - "msg": "Sorry, that isn't a valid phone.", - "type": "reply" - } - ] - }, - { - "y": 0, - "x": 100, - "destination": "6c67aed5-a7ac-472f-903e-4eb1d43f913e", - "uuid": "632bd152-98c6-4b83-8a5d-0f9343fcf884", - "actions": [ - { - "msg": "Hi @step.contact, what is your phone number?", - "type": "reply" - } - ] - } - ], - "metadata": { - "notes": [] - } - }, - "flow_type": "F", - "name": "Substitution", - "id": 2900 - } - ], - "triggers": [] -} diff --git a/media/test_flows/test_db.json b/media/test_flows/test_db.json deleted file mode 100644 index ce62910e0b5..00000000000 --- a/media/test_flows/test_db.json +++ /dev/null @@ -1,822 +0,0 @@ -{ - "version": "13", - "site": "https://textit.com", - "flows": [ - { - "_ui": { - "nodes": { - "5bac8056-d24b-4134-9620-dbc0a4b81492": { - "position": { - "left": 0, - "top": 0 - }, - "type": "execute_actions" - }, - "5ff349ab-e74a-47d3-9ada-9fe1bd99416e": { - "position": { - "left": 480, - "top": 60 - }, - "type": "execute_actions" - }, - "70e72b75-eb66-436a-a0c9-ce890ff8f537": { - "type": "wait_for_response", - "position": { - "left": 0, - "top": 120 - }, - "config": { - "cases": {} - } - }, - "b2bd251a-d241-4bb1-a60b-6caf16014eda": { - "position": { - "left": 0, - "top": 280 - }, - "type": "execute_actions" - }, - "2e539a4c-68ff-4bf7-be23-b57845d2a550": { - "position": { - "left": 520, - "top": 560 - }, - "type": "execute_actions" - }, - "34ae02f2-4cb2-4b63-8ec5-38b5c128e497": { - "position": { - "left": 80, - "top": 480 - }, - "type": "wait_for_response" - }, - 
"8362d6e8-6bf9-43a5-8f74-44fd0955ec75": { - "position": { - "left": 120, - "top": 680 - }, - "type": "execute_actions" - }, - "0406725d-7701-463e-86a5-88a8af1ca42a": { - "position": { - "left": 120, - "top": 900 - }, - "type": "wait_for_response" - }, - "440d670a-4ed5-46ff-9906-228e6ed498a4": { - "position": { - "left": 120, - "top": 1040 - }, - "type": "execute_actions" - }, - "60d2ee45-5570-4ab9-8dfd-8d512732f765": { - "position": { - "left": 260, - "top": 300 - }, - "type": "execute_actions" - } - }, - "stickies": {} - }, - "expire_after_minutes": 720, - "language": "und", - "localization": {}, - "name": "Favorites", - "nodes": [ - { - "actions": [ - { - "text": "What is your favorite color?", - "type": "send_msg", - "uuid": "cbcd7a22-2835-4ef9-889d-1a0ae9c9293e", - "quick_replies": [] - } - ], - "exits": [ - { - "destination_uuid": "70e72b75-eb66-436a-a0c9-ce890ff8f537", - "uuid": "79c8dfd7-bfc9-47a9-a39a-0daba71b7e47" - } - ], - "uuid": "5bac8056-d24b-4134-9620-dbc0a4b81492" - }, - { - "actions": [ - { - "text": "I don't know that color. Try again.", - "type": "send_msg", - "uuid": "557efd8d-1e92-4150-94cd-18b26204b23d", - "quick_replies": [] - } - ], - "exits": [ - { - "destination_uuid": "70e72b75-eb66-436a-a0c9-ce890ff8f537", - "uuid": "7bb84345-0ba5-4c63-86f8-bc02f24be7c5" - } - ], - "uuid": "5ff349ab-e74a-47d3-9ada-9fe1bd99416e" - }, - { - "uuid": "70e72b75-eb66-436a-a0c9-ce890ff8f537", - "actions": [], - "router": { - "type": "switch", - "default_category_uuid": "1c864609-e85d-42fb-ad7c-0819825a1295", - "cases": [ - { - "arguments": [ - "Red" - ], - "type": "has_any_word", - "uuid": "0e33aa43-bc0d-47e5-8b6f-0a76154e1956", - "category_uuid": "4397bdc7-749b-4441-9e5b-299cb6405c16" - }, - { - "arguments": [ - "Green" - ], - "type": "has_any_word", - "uuid": "86e49280-ba0a-4960-a94d-315cfb2bf323", - "category_uuid": "95b43ef3-9e17-453c-8bf3-b0c92f6f2e54" - }, - { - "arguments": [ - "Blue" - ], - "type": "has_any_word", - "uuid": "4d9224e3-fa56-4601-b190-26a095912804", - "category_uuid": "1e95e408-b060-420a-a4c4-b9d6a1bb3ea0" - }, - { - "arguments": [ - "Navy" - ], - "type": "has_any_word", - "uuid": "e4bebbcf-cc80-4751-a0c6-e0912a815381", - "category_uuid": "1e95e408-b060-420a-a4c4-b9d6a1bb3ea0" - } - ], - "categories": [ - { - "exit_uuid": "cc45b6f3-08fd-40a8-a4d3-b910f27a98bc", - "name": "Red", - "uuid": "4397bdc7-749b-4441-9e5b-299cb6405c16" - }, - { - "exit_uuid": "832c7893-d2fb-4431-b76a-2cb948aa16c0", - "name": "Green", - "uuid": "95b43ef3-9e17-453c-8bf3-b0c92f6f2e54" - }, - { - "exit_uuid": "ad5c10d9-d5cc-4123-abe1-649a471241cc", - "name": "Blue", - "uuid": "1e95e408-b060-420a-a4c4-b9d6a1bb3ea0" - }, - { - "exit_uuid": "ddc28771-2373-4a0e-a93b-f5dbf50130b0", - "name": "Other", - "uuid": "1c864609-e85d-42fb-ad7c-0819825a1295" - }, - { - "exit_uuid": "272fe4ef-0151-479b-9710-2df520a96aa0", - "name": "No Response", - "uuid": "0ed4ec87-3146-4e51-939f-6ce04e9dc372" - } - ], - "operand": "@input.text", - "wait": { - "type": "msg", - "timeout": { - "seconds": 300, - "category_uuid": "0ed4ec87-3146-4e51-939f-6ce04e9dc372" - } - }, - "result_name": "Color" - }, - "exits": [ - { - "destination_uuid": "b2bd251a-d241-4bb1-a60b-6caf16014eda", - "uuid": "cc45b6f3-08fd-40a8-a4d3-b910f27a98bc" - }, - { - "destination_uuid": "b2bd251a-d241-4bb1-a60b-6caf16014eda", - "uuid": "832c7893-d2fb-4431-b76a-2cb948aa16c0" - }, - { - "destination_uuid": "b2bd251a-d241-4bb1-a60b-6caf16014eda", - "uuid": "ad5c10d9-d5cc-4123-abe1-649a471241cc" - }, - { - "destination_uuid": 
"5ff349ab-e74a-47d3-9ada-9fe1bd99416e", - "uuid": "ddc28771-2373-4a0e-a93b-f5dbf50130b0" - }, - { - "destination_uuid": "60d2ee45-5570-4ab9-8dfd-8d512732f765", - "uuid": "272fe4ef-0151-479b-9710-2df520a96aa0" - } - ] - }, - { - "actions": [ - { - "text": "Good choice, I like @results.color.category_localized too! What is your favorite beer?", - "type": "send_msg", - "uuid": "0760d41c-0b19-416e-a456-0fa07d1f9d1d", - "quick_replies": [] - } - ], - "exits": [ - { - "destination_uuid": "34ae02f2-4cb2-4b63-8ec5-38b5c128e497", - "uuid": "344bc8ef-75d6-4462-ab30-346d0e8328b1" - } - ], - "uuid": "b2bd251a-d241-4bb1-a60b-6caf16014eda" - }, - { - "actions": [ - { - "text": "Sorry you can't participate right now, I'll try again later.", - "type": "send_msg", - "uuid": "f6aa4c18-b7b6-4076-9ece-516bacf90214", - "quick_replies": [] - } - ], - "exits": [ - { - "uuid": "4c1d34db-478b-47d1-ad17-4f03a9598333" - } - ], - "uuid": "60d2ee45-5570-4ab9-8dfd-8d512732f765" - }, - { - "exits": [ - { - "destination_uuid": "8362d6e8-6bf9-43a5-8f74-44fd0955ec75", - "uuid": "7644fb28-942f-4fca-a515-3e52f2678bae" - }, - { - "destination_uuid": "8362d6e8-6bf9-43a5-8f74-44fd0955ec75", - "uuid": "165d9dce-7a8b-4d12-a633-8b82552678db" - }, - { - "destination_uuid": "8362d6e8-6bf9-43a5-8f74-44fd0955ec75", - "uuid": "d4bf9e11-f2ac-4b72-aa92-91b39fa8ba8e" - }, - { - "destination_uuid": "8362d6e8-6bf9-43a5-8f74-44fd0955ec75", - "uuid": "8eaea6ac-f9a7-490e-9aaa-f76dba22b298" - }, - { - "destination_uuid": "2e539a4c-68ff-4bf7-be23-b57845d2a550", - "uuid": "fa156c46-bd39-4bc2-91ca-c9710ad2cd5f" - } - ], - "router": { - "cases": [ - { - "arguments": [ - "Mutzig" - ], - "category_uuid": "afa671f5-8425-44be-ac8d-6c8508055739", - "type": "has_any_word", - "uuid": "dc2b1193-a214-4269-b3fd-9f20863e822d" - }, - { - "arguments": [ - "Primus" - ], - "category_uuid": "a6549d2f-38c5-4b07-be95-a599f6d468fa", - "type": "has_any_word", - "uuid": "21e62688-4e9c-4f9a-b4e7-476b87b37517" - }, - { - "arguments": [ - "Turbo King" - ], - "category_uuid": "81469d86-54c4-451a-b0b0-525f404d2b05", - "type": "has_any_word", - "uuid": "d9c60b83-fe0e-4ad6-8f48-57878f2b9185" - }, - { - "arguments": [ - "Skol" - ], - "category_uuid": "b98bdd8f-97d5-4fc5-b7dc-d352e467f8f1", - "type": "has_any_word", - "uuid": "35bfdcd2-45e9-483c-aa2b-3b2dc61e60f6" - } - ], - "categories": [ - { - "exit_uuid": "7644fb28-942f-4fca-a515-3e52f2678bae", - "name": "Mutzig", - "uuid": "afa671f5-8425-44be-ac8d-6c8508055739" - }, - { - "exit_uuid": "165d9dce-7a8b-4d12-a633-8b82552678db", - "name": "Primus", - "uuid": "a6549d2f-38c5-4b07-be95-a599f6d468fa" - }, - { - "exit_uuid": "d4bf9e11-f2ac-4b72-aa92-91b39fa8ba8e", - "name": "Turbo King", - "uuid": "81469d86-54c4-451a-b0b0-525f404d2b05" - }, - { - "exit_uuid": "8eaea6ac-f9a7-490e-9aaa-f76dba22b298", - "name": "Skol", - "uuid": "b98bdd8f-97d5-4fc5-b7dc-d352e467f8f1" - }, - { - "exit_uuid": "fa156c46-bd39-4bc2-91ca-c9710ad2cd5f", - "name": "Other", - "uuid": "ebe96e2f-8a66-4974-848e-6524b0e8893b" - } - ], - "default_category_uuid": "ebe96e2f-8a66-4974-848e-6524b0e8893b", - "operand": "@input", - "result_name": "Beer", - "type": "switch", - "wait": { - "type": "msg" - } - }, - "uuid": "34ae02f2-4cb2-4b63-8ec5-38b5c128e497", - "actions": [] - }, - { - "actions": [ - { - "text": "I don't know that one, try again please.", - "type": "send_msg", - "uuid": "75bf7db1-1cb9-4c63-8936-4691f08ba1e1", - "quick_replies": [] - } - ], - "exits": [ - { - "destination_uuid": "34ae02f2-4cb2-4b63-8ec5-38b5c128e497", - "uuid": 
"72774a1d-858f-498c-ad69-d04bb49af876" - } - ], - "uuid": "2e539a4c-68ff-4bf7-be23-b57845d2a550" - }, - { - "actions": [ - { - "text": "Mmmmm... delicious @results.beer.category_localized. If only they made @(lower(results.color)) @results.beer.category_localized! Lastly, what is your name?", - "type": "send_msg", - "uuid": "5d6c182b-f5d9-4ac9-be02-81337b73c503", - "quick_replies": [] - } - ], - "exits": [ - { - "destination_uuid": "0406725d-7701-463e-86a5-88a8af1ca42a", - "uuid": "6a28d354-a156-45db-8ae9-e4fe67c263a5" - } - ], - "uuid": "8362d6e8-6bf9-43a5-8f74-44fd0955ec75" - }, - { - "exits": [ - { - "destination_uuid": "440d670a-4ed5-46ff-9906-228e6ed498a4", - "uuid": "d740a951-37b2-4851-8fe8-406268f3eeec" - } - ], - "router": { - "cases": [], - "categories": [ - { - "exit_uuid": "d740a951-37b2-4851-8fe8-406268f3eeec", - "name": "All Responses", - "uuid": "b38fdc7f-b2e2-4dd6-acf2-68bb8e9cc84f" - } - ], - "default_category_uuid": "b38fdc7f-b2e2-4dd6-acf2-68bb8e9cc84f", - "operand": "@input", - "result_name": "Name", - "type": "switch", - "wait": { - "type": "msg" - } - }, - "uuid": "0406725d-7701-463e-86a5-88a8af1ca42a", - "actions": [] - }, - { - "actions": [ - { - "text": "Thanks @results.name, we are all done!", - "type": "send_msg", - "uuid": "1d09b2b7-8fc5-48ca-8c69-70a7f4e4ba0b", - "quick_replies": [] - }, - { - "uuid": "90a1eb56-bf99-42ba-82c8-e4bfcaf738d7", - "type": "set_contact_name", - "name": "@results.name" - } - ], - "exits": [ - { - "uuid": "ac30d632-1389-45c2-8b36-a5394feadf7f" - } - ], - "uuid": "440d670a-4ed5-46ff-9906-228e6ed498a4" - } - ], - "spec_version": "13.5.0", - "type": "messaging", - "uuid": "4fad232a-ca3a-4da7-be93-21492d407a33", - "revision": 38 - }, - { - "name": "Support", - "uuid": "de428d9c-3f63-4c66-bfa0-0c67e65aed66", - "spec_version": "13.5.0", - "language": "eng", - "type": "messaging", - "nodes": [ - { - "uuid": "6d796df5-0e15-4d93-98ee-0d4ffa78adef", - "actions": [ - { - "attachments": [], - "text": "Hi there, thanks for reaching out. 
Please give me as much detail as possible and I'll make sure we get somebody over to you right away!", - "type": "send_msg", - "quick_replies": [], - "uuid": "f05569c5-7a83-49d5-a09f-cf0d24780fae" - } - ], - "exits": [ - { - "uuid": "59fc2ad8-1728-4ff4-a9b3-cd10eac8da32", - "destination_uuid": "d634053c-c012-42d6-97d3-e18d4d8499d7" - } - ] - }, - { - "uuid": "d634053c-c012-42d6-97d3-e18d4d8499d7", - "actions": [ - { - "uuid": "b0a8d89b-b278-4129-bb41-d3f7f5075e02", - "type": "open_ticket", - "body": "", - "topic": { - "uuid": "ba121ac9-e7ff-4ef7-bf62-af81a6511f5a", - "name": "General", - "counts": { - "open": 0, - "closed": 0 - }, - "system": true, - "created_on": "2024-05-01T22:45:59.841309Z" - }, - "assignee": null, - "result_name": "Result" - } - ], - "router": { - "type": "switch", - "operand": "@results.result", - "cases": [ - { - "uuid": "0d7e136d-4d52-410f-861a-8e900468b145", - "type": "has_category", - "arguments": [ - "Success" - ], - "category_uuid": "f0652a4e-7b3f-4a68-b565-010d18386c19" - } - ], - "categories": [ - { - "uuid": "f0652a4e-7b3f-4a68-b565-010d18386c19", - "name": "Success", - "exit_uuid": "6166dc7f-b343-4f1a-879f-b03e75fa4766" - }, - { - "uuid": "0af54548-598d-40ef-8e68-20bad9d7c03e", - "name": "Failure", - "exit_uuid": "0068e8de-10e3-4bd7-9bf6-57294dccfbc9" - } - ], - "default_category_uuid": "0af54548-598d-40ef-8e68-20bad9d7c03e" - }, - "exits": [ - { - "uuid": "6166dc7f-b343-4f1a-879f-b03e75fa4766", - "destination_uuid": null - }, - { - "uuid": "0068e8de-10e3-4bd7-9bf6-57294dccfbc9", - "destination_uuid": null - } - ] - } - ], - "_ui": { - "nodes": { - "6d796df5-0e15-4d93-98ee-0d4ffa78adef": { - "position": { - "left": 20, - "top": 0 - }, - "type": "execute_actions" - }, - "d634053c-c012-42d6-97d3-e18d4d8499d7": { - "type": "split_by_ticket", - "position": { - "left": 20, - "top": 200 - }, - "config": {} - } - } - }, - "revision": 11, - "expire_after_minutes": 10080, - "localization": {} - }, - { - "name": "New Chat", - "uuid": "5fe7d119-9fca-41f4-adde-a4171301152f", - "spec_version": "13.5.0", - "language": "eng", - "type": "messaging", - "nodes": [ - { - "uuid": "f805ec81-be27-4d1d-bd1a-a513df850235", - "actions": [ - { - "attachments": [], - "text": "\ud83d\udc4b Welcome! Thanks for visiting our web page. 
Is there any thing I can answer for you?", - "type": "send_msg", - "quick_replies": [], - "uuid": "a0610c91-5e1c-43dd-bb31-88ce650d56ae" - } - ], - "exits": [ - { - "uuid": "941a4343-6eee-439f-9c1d-77cb81cbf78f", - "destination_uuid": "6fe16e87-40ea-46dd-8975-21e734270b86" - } - ] - }, - { - "uuid": "6fe16e87-40ea-46dd-8975-21e734270b86", - "actions": [], - "router": { - "type": "switch", - "default_category_uuid": "53c5cee4-963c-47d0-92fc-817647758d6b", - "cases": [ - { - "arguments": [ - "yes" - ], - "type": "has_any_word", - "uuid": "c14c8f2f-74bc-46ed-87d6-e5676a4baf73", - "category_uuid": "be9f6f8d-7401-46df-85ba-a929f5e36430" - }, - { - "arguments": [ - "no" - ], - "type": "has_any_word", - "uuid": "3b2875fe-ce4c-4ece-b518-af875b3bcef3", - "category_uuid": "8b1387c9-319c-4d21-851a-489404324390" - } - ], - "categories": [ - { - "uuid": "be9f6f8d-7401-46df-85ba-a929f5e36430", - "name": "Yes", - "exit_uuid": "606b7207-eb1c-4f7b-a0ee-12c7da8d0e7b" - }, - { - "uuid": "8b1387c9-319c-4d21-851a-489404324390", - "name": "No", - "exit_uuid": "9bcc3740-5fee-4829-84be-fe85f9498792" - }, - { - "uuid": "53c5cee4-963c-47d0-92fc-817647758d6b", - "name": "Other", - "exit_uuid": "5389008f-d3c7-4d86-97bb-952c116f6762" - } - ], - "operand": "@input.text", - "wait": { - "type": "msg" - }, - "result_name": "Result 1" - }, - "exits": [ - { - "uuid": "606b7207-eb1c-4f7b-a0ee-12c7da8d0e7b", - "destination_uuid": "81f59084-b989-4bfc-9fab-b944c65647c2" - }, - { - "uuid": "9bcc3740-5fee-4829-84be-fe85f9498792", - "destination_uuid": "dcf59feb-82d7-4eae-a63f-23905e8961b3" - }, - { - "uuid": "5389008f-d3c7-4d86-97bb-952c116f6762", - "destination_uuid": "81f59084-b989-4bfc-9fab-b944c65647c2" - } - ] - }, - { - "uuid": "dcf59feb-82d7-4eae-a63f-23905e8961b3", - "actions": [ - { - "attachments": [], - "text": "Hey, no problem, if you need anything, you know where to find me!", - "type": "send_msg", - "quick_replies": [], - "uuid": "31e3b9fc-719f-4ed1-8f68-4e9d6d10a9ea" - } - ], - "exits": [ - { - "uuid": "95c0c45b-effe-4350-bc7d-d128680925a5", - "destination_uuid": null - } - ] - }, - { - "uuid": "81f59084-b989-4bfc-9fab-b944c65647c2", - "actions": [ - { - "attachments": [], - "text": "Ok, please add as much additional detail as possible and I'll get somebody over to help.", - "type": "send_msg", - "quick_replies": [], - "uuid": "3f3dc468-043f-4386-93e2-c448428c8f38" - } - ], - "exits": [ - { - "uuid": "82398b8f-df44-4069-af05-f8a41f3056ff", - "destination_uuid": "f43743df-3d6c-4ae2-8f24-c6bef3e999fc" - } - ] - }, - { - "uuid": "f43743df-3d6c-4ae2-8f24-c6bef3e999fc", - "actions": [ - { - "uuid": "8b26cc8c-85d7-490c-9494-5986141a751c", - "type": "open_ticket", - "body": "", - "topic": { - "uuid": "ba121ac9-e7ff-4ef7-bf62-af81a6511f5a", - "name": "General", - "counts": { - "open": 0, - "closed": 0 - }, - "system": true, - "created_on": "2024-05-01T22:45:59.841309Z" - }, - "assignee": null, - "result_name": "Result" - } - ], - "router": { - "type": "switch", - "operand": "@results.result", - "cases": [ - { - "uuid": "7e7ac0f5-1249-484d-8774-eaffced73e4d", - "type": "has_category", - "arguments": [ - "Success" - ], - "category_uuid": "380bb0c4-913f-45e0-806c-f1e6ea75933e" - } - ], - "categories": [ - { - "uuid": "380bb0c4-913f-45e0-806c-f1e6ea75933e", - "name": "Success", - "exit_uuid": "f36e4bb3-5f73-4334-b25a-e4f43e0f1955" - }, - { - "uuid": "d43e8367-cd56-4063-88cc-5be83643c0d9", - "name": "Failure", - "exit_uuid": "797d6c62-34c0-4486-96e3-a7a2808c8784" - } - ], - "default_category_uuid": 
"d43e8367-cd56-4063-88cc-5be83643c0d9" - }, - "exits": [ - { - "uuid": "f36e4bb3-5f73-4334-b25a-e4f43e0f1955", - "destination_uuid": null - }, - { - "uuid": "797d6c62-34c0-4486-96e3-a7a2808c8784", - "destination_uuid": null - } - ] - } - ], - "_ui": { - "nodes": { - "f805ec81-be27-4d1d-bd1a-a513df850235": { - "position": { - "left": 0, - "top": 0 - }, - "type": "execute_actions" - }, - "6fe16e87-40ea-46dd-8975-21e734270b86": { - "type": "wait_for_response", - "position": { - "left": 0, - "top": 160 - }, - "config": { - "cases": {} - } - }, - "dcf59feb-82d7-4eae-a63f-23905e8961b3": { - "position": { - "left": 300, - "top": 320 - }, - "type": "execute_actions" - }, - "81f59084-b989-4bfc-9fab-b944c65647c2": { - "position": { - "left": 0, - "top": 360 - }, - "type": "execute_actions" - }, - "f43743df-3d6c-4ae2-8f24-c6bef3e999fc": { - "type": "split_by_ticket", - "position": { - "left": 0, - "top": 540 - }, - "config": {} - } - } - }, - "revision": 72, - "expire_after_minutes": 10080, - "localization": {} - } - ], - "campaigns": [], - "triggers": [ - { - "trigger_type": "K", - "flow": { - "uuid": "4fad232a-ca3a-4da7-be93-21492d407a33", - "name": "Favorites" - }, - "groups": [], - "exclude_groups": [], - "channel": null, - "keywords": [ - "fav" - ], - "match_type": "F" - }, - { - "trigger_type": "N", - "flow": { - "uuid": "5fe7d119-9fca-41f4-adde-a4171301152f", - "name": "New Chat" - }, - "groups": [], - "exclude_groups": [], - "channel": null - }, - { - "trigger_type": "K", - "flow": { - "uuid": "de428d9c-3f63-4c66-bfa0-0c67e65aed66", - "name": "Support" - }, - "groups": [], - "exclude_groups": [], - "channel": null, - "keywords": [ - "help" - ], - "match_type": "F" - } - ], - "fields": [], - "groups": [] -} \ No newline at end of file diff --git a/media/test_flows/too_old.json b/media/test_flows/too_old.json deleted file mode 100644 index ce8daaadbbf..00000000000 --- a/media/test_flows/too_old.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "campaigns": [], - "version": 2, - "site": "http://rapidpro.io", - "flows": [ - { - "definition": { - "rule_sets": [], - "entr": "d2ed3c4c-c4d0-4f54-b0b0-6eaa5afd33f8", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "d2ed3c4c-c4d0-4f54-b0b0-6eaa5afd33f8", - "actions": [ - { - "msg": "You've been registered as a new mother, congratulations on having sex at least once. Your CHW is @extra.contact.phone", - "type": "reply" - }, - { - "field": "chw", - "type": "save", - "value": "@extra.contact.tel_e164", - "label": "CHW" - }, - { - "field": "expected_delivery_date", - "type": "save", - "value": "@extra.flow.edd", - "label": "Expected Delivery Date" - }, - { - "field": "name", - "type": "save", - "value": "@extra.flow.name", - "label": "Contact Name" - }, - { - "type": "add_group", - "groups": [ - "Expecting Mothers" - ] - } - ] - } - ], - "metadata": { - "notes": [] - } - }, - "flow_type": "F", - "name": "New Mother", - "id": 1500 - } - ], - "triggers": [] -} diff --git a/media/test_flows/triggered.json b/media/test_flows/triggered.json deleted file mode 100644 index 87f3505df1e..00000000000 --- a/media/test_flows/triggered.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "campaigns": [], - "version": 8, - "site": "http://rapidpro.io", - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "7b99bb2e-054f-4a60-a986-367ecf114879", - "actions": [ - { - "msg": { - "eng": "Honey, I triggered the flow! 
@extra.text" - }, - "type": "reply" - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "7b99bb2e-054f-4a60-a986-367ecf114879", - "rule_sets": [], - "metadata": { - "expires": 10080, - "saved_on": "2016-07-21T16:34:32.457154Z", - "id": 25994, - "name": "Triggeree", - "revision": 1 - } - }, - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "06a3be8b-36f9-4f73-b31a-95a1e8ee920d", - "actions": [ - { - "name": "Triggeree", - "contacts": [ - { - "name": "Marshawn", - "id": contact_id - } - ], - "variables": [], - "groups": [], - "type": "trigger-flow", - "id": 25994 - }, - { - "name": "Triggeree", - "type": "flow", - "id": 25994 - } - ] - } - ], - "version": 8, - "flow_type": "F", - "entry": "98d0948b-c50d-4033-b07c-403d324aa147", - "rule_sets": [{ - "uuid": "98d0948b-c50d-4033-b07c-403d324aa147", - "webhook_action": "GET", - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "eng": "All Responses" - }, - "destination": "06a3be8b-36f9-4f73-b31a-95a1e8ee920d", - "uuid": "1e89ff33-80fe-4d34-9ced-3b96f5aacd50", - "destination_type": "A" - } - ], - "webhook": "http://localhost:49999/where", - "ruleset_type": "webhook", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 0, - "x": 328, - "config": {} - }], - "metadata": { - "expires": 10080, - "revision": 1, - "id": 25995, - "name": "Triggerer", - "saved_on": "2016-07-21T16:35:05.717556Z" - } - } - ], - "triggers": [] -} diff --git a/media/test_flows/triggered_flow.json b/media/test_flows/triggered_flow.json deleted file mode 100644 index cfe6901a54e..00000000000 --- a/media/test_flows/triggered_flow.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "campaigns": [], - "version": 3, - "site": "http://rapdipro.io", - "flows": [ - { - "definition": { - "entry": "4ec3d47a-eef3-4d80-b5b1-38dab8e518dc", - "rule_sets": [], - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "4ec3d47a-eef3-4d80-b5b1-38dab8e518dc", - "actions": [ - { - "msg": "This is the triggered flow", - "type": "reply" - } - ] - } - ], - "metadata": { - "notes": [] - } - }, - "id": 12140, - "flow_type": "F", - "name": "Triggered Flow" - }, - { - "definition": { - "entry": "b6c4d782-5165-4541-bb2e-7348c9676882", - "rule_sets": [], - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "b6c4d782-5165-4541-bb2e-7348c9676882", - "actions": [ - { - "name": "Triggered Flow", - "contacts": [], - "variables": [], - "groups": [ - { - "name": "Survey Audience", - "id": 6250 - } - ], - "type": "trigger-flow", - "id": 12140 - } - ] - } - ], - "metadata": { - "notes": [] - } - }, - "id": 12141, - "flow_type": "F", - "name": "Trigger a Flow" - } - ], - "triggers": [] -} diff --git a/media/test_flows/two_to_all.json b/media/test_flows/two_to_all.json deleted file mode 100644 index b9d3e9e2b66..00000000000 --- a/media/test_flows/two_to_all.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "campaigns": [], - "version": 10, - "site": "https://app.rapidpro.io", - "flows": [ - { - "base_language": "eng", - "action_sets": [ - { - "y": 0, - "x": 100, - "destination": null, - "uuid": "a42b6981-1b8c-44a9-8260-3bf504c9bb25", - "actions": [ - { - "msg": { - "eng": "first message" - }, - "media": {}, - "send_all": true, - "type": "reply" - }, - { - "msg": { - "eng": "second message" - }, - "media": {}, - "send_all": true, - "type": "reply" - } - ] - } - ], - "version": 10, - "flow_type": "F", - "entry": 
"a42b6981-1b8c-44a9-8260-3bf504c9bb25", - "rule_sets": [], - "metadata": { - "expires": 10080, - "revision": 3, - "uuid": "a9de95b7-2959-40b7-afdd-99ef1975b812", - "name": "Two to all", - "saved_on": "2017-03-17T14:27:29.032085Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/type_flow.json b/media/test_flows/type_flow.json deleted file mode 100644 index 7675ff831ca..00000000000 --- a/media/test_flows/type_flow.json +++ /dev/null @@ -1,394 +0,0 @@ -{ - "campaigns": [], - "version": "10.1", - "site": "https://app.rapidpro.io", - "flows": [ - { - "base_language": "base", - "action_sets": [ - { - "y": 0, - "x": 92, - "destination": "9c941ba5-e4df-47e0-9a4f-594986ae1b1a", - "uuid": "bc3da5f2-6fe5-41f1-ac0e-ec2701189ef2", - "actions": [ - { - "msg": { - "base": "Hey @contact.nickname, you joined on @contact.joined_on in @contact.district." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "4dc98ff5-8d86-45f5-8336-8949029e893e" - }, - { - "msg": { - "base": "It's @date. The time is @date.now on @date.today." - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "058e5d4a-3447-49d9-a033-ebe3010b5875" - }, - { - "msg": { - "base": "Send text" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "9568e1c8-04f2-45ef-a477-4521d19bfaf6" - } - ] - }, - { - "y": 257, - "x": 78, - "destination": "a4904b78-08b8-42fd-9479-27bcb1764bc4", - "uuid": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", - "actions": [ - { - "msg": { - "base": "You said @flow.text at @flow.text.time. Send date" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "6f4fc213-3037-49e5-ac45-b956c48fd546" - } - ] - }, - { - "y": 540, - "x": 95, - "destination": "9994619b-e68d-4c94-90d6-af19fb944f7d", - "uuid": "9bbdc63c-4385-44e1-b573-a127f50d3d34", - "actions": [ - { - "msg": { - "base": "You said @flow.date which was in category @flow.date.category Send number" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "7177ef30-33ca-4b25-8af7-3213e0483b56" - } - ] - }, - { - "y": 825, - "x": 96, - "destination": "01cc820b-c516-4e68-8903-aa69866b11b6", - "uuid": "a4a37023-de22-4ac4-b431-da2a333c93cd", - "actions": [ - { - "msg": { - "base": "You said @flow.number. Send state" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "34d622bc-e2ad-44aa-b047-cfb38e2dc2cc" - } - ] - }, - { - "y": 1084, - "x": 94, - "destination": "9769918c-8ca4-4ec5-8b5b-bf94cc6746a9", - "uuid": "7e8dfcd5-6510-4060-9608-2c8faa3a8e0a", - "actions": [ - { - "msg": { - "base": "You said @flow.state which was in category @flow.state.category. Send district" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "a4428571-9b86-49b8-97e1-6ffee3cddbaa" - } - ] - }, - { - "y": 1460, - "x": 73, - "destination": "ea2244de-7b23-4fbb-8f99-38cde3100de8", - "uuid": "605e2fe7-321a-4cce-b97b-877d75bd3b12", - "actions": [ - { - "msg": { - "base": "You said @flow.district. 
Send ward" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "5f8eb5aa-249b-4718-a502-8406dd0ae418" - } - ] - }, - { - "y": 1214, - "x": 284, - "destination": "498b1953-02f1-47dd-b9cb-1b51913e348f", - "uuid": "9769918c-8ca4-4ec5-8b5b-bf94cc6746a9", - "actions": [ - { - "msg": { - "base": "You said @flow.ward.", - "fre": "Tu as dit @flow.ward" - }, - "media": {}, - "send_all": false, - "type": "reply", - "uuid": "b95b88c8-a85c-4bac-931d-310d678c286a" - }, - { - "lang": "fre", - "type": "lang", - "name": "French", - "uuid": "56a4bca5-b9e5-4d04-883c-ca65d7c4d538" - } - ] - } - ], - "version": "10.1", - "flow_type": "F", - "entry": "bc3da5f2-6fe5-41f1-ac0e-ec2701189ef2", - "rule_sets": [ - { - "uuid": "9c941ba5-e4df-47e0-9a4f-594986ae1b1a", - "rules": [ - { - "category": { - "base": "All Responses" - }, - "uuid": "a4682f52-7869-4e64-bf9f-8d2c0a341d19", - "destination": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", - "label": null, - "destination_type": "A", - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "Text", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 146, - "x": 265, - "config": {} - }, - { - "uuid": "a4904b78-08b8-42fd-9479-27bcb1764bc4", - "rules": [ - { - "category": { - "base": "is a date" - }, - "uuid": "e410616b-b5cd-4fd1-af42-9c6b6c9fe282", - "destination": "9bbdc63c-4385-44e1-b573-a127f50d3d34", - "label": null, - "destination_type": "A", - "test": { - "type": "date" - } - }, - { - "category": { - "base": "Other" - }, - "uuid": "a720d0b1-0686-47be-a306-1543e470c6de", - "destination": "dac0c91f-3f3f-43d5-a2d9-5c1059998134", - "label": null, - "destination_type": "A", - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "Date", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 391, - "x": 273, - "config": {} - }, - { - "uuid": "9994619b-e68d-4c94-90d6-af19fb944f7d", - "rules": [ - { - "category": { - "base": "numeric" - }, - "uuid": "c4881d22-57aa-4964-abbc-aaf26b875614", - "destination": "a4a37023-de22-4ac4-b431-da2a333c93cd", - "label": null, - "destination_type": "A", - "test": { - "type": "number" - } - }, - { - "category": { - "base": "Other" - }, - "uuid": "6cd3fb0c-070d-4060-bafc-badaebe5134e", - "destination": null, - "label": null, - "destination_type": null, - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "Number", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 679, - "x": 267, - "config": {} - }, - { - "uuid": "01cc820b-c516-4e68-8903-aa69866b11b6", - "rules": [ - { - "category": { - "base": "state" - }, - "uuid": "4ef398b1-d3f1-4023-b608-8803cc05dd20", - "destination": "7e8dfcd5-6510-4060-9608-2c8faa3a8e0a", - "label": null, - "destination_type": "A", - "test": { - "type": "state" - } - }, - { - "category": { - "base": "Other" - }, - "uuid": "38a4583c-cf73-454c-80e5-09910cf92f4b", - "destination": null, - "label": null, - "destination_type": null, - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "State", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 956, - "x": 271, - "config": {} - }, - { - "uuid": "498b1953-02f1-47dd-b9cb-1b51913e348f", - "rules": [ - { - "category": { - "base": "district", - "fre": "le district" - }, - "uuid": "47147597-00c6-44bc-95d2-bebec9f1a45b", - "destination": "605e2fe7-321a-4cce-b97b-877d75bd3b12", - "label": null, - "destination_type": "A", - 
"test": { - "test": "@flow.state", - "type": "district" - } - }, - { - "category": { - "base": "Other" - }, - "uuid": "1145c620-2512-4228-b561-80024bbd91ee", - "destination": null, - "label": null, - "destination_type": null, - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "District", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 1355, - "x": 266, - "config": {} - }, - { - "uuid": "ea2244de-7b23-4fbb-8f99-38cde3100de8", - "rules": [ - { - "category": { - "base": "ward" - }, - "uuid": "b5159826-a55a-4803-a656-64d47803e8bf", - "destination": null, - "label": null, - "destination_type": null, - "test": { - "state": "@flow.state.", - "type": "ward", - "district": "@flow.district" - } - }, - { - "category": { - "base": "Other" - }, - "uuid": "c1aa2a53-4d85-4fdd-953e-7e24b06cc7ea", - "destination": null, - "label": null, - "destination_type": null, - "test": { - "type": "true" - } - } - ], - "ruleset_type": "wait_message", - "label": "Ward", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 1584, - "x": 268, - "config": {} - } - ], - "metadata": { - "expires": 10080, - "revision": 19, - "uuid": "d7468d97-b8d7-482e-a09c-d0bfe839c555", - "name": "Type Flow", - "saved_on": "2017-10-30T19:38:39.814935Z" - } - } - ], - "triggers": [ - { - "trigger_type": "K", - "flow": { - "name": "Type Flow", - "uuid": "d7468d97-b8d7-482e-a09c-d0bfe839c555" - }, - "groups": [], - "keyword": "types", - "channel": null - } - ] -} \ No newline at end of file diff --git a/media/test_flows/ussd_example.json b/media/test_flows/ussd_example.json deleted file mode 100644 index 8f52a2ea9a7..00000000000 --- a/media/test_flows/ussd_example.json +++ /dev/null @@ -1,160 +0,0 @@ -{ - "version": 8, - "flows": [ - { - "base_language": "base", - "action_sets": [], - "version": 8, - "flow_type": "U", - "entry": "5e0fe53f-1caa-434d-97e7-189f33353372", - "rule_sets": [ - { - "uuid": "5e0fe53f-1caa-434d-97e7-189f33353372", - "webhook_action": null, - "rules": [ - { - "category": { - "base": "Sports" - }, - "test": { - "test": 1, - "type": "eq" - }, - "destination": "66aa0bb5-d1e5-4026-a056-fd22c353539e", - "uuid": "337e5e25-204b-4786-bee6-ff4c431986eb", - "destination_type": "R" - }, - { - "category": { - "base": "Politics" - }, - "test": { - "test": 2, - "type": "eq" - }, - "destination": "66aa0bb5-d1e5-4026-a056-fd22c353539e", - "uuid": "45803c40-aaf3-44d3-a301-f7eb35fa6be4", - "destination_type": "R" - }, - { - "category": { - "base": "Movies" - }, - "test": { - "test": 3, - "type": "eq" - }, - "destination": "66aa0bb5-d1e5-4026-a056-fd22c353539e", - "uuid": "13f3ed00-44d0-4119-b5fd-269c8f09fce3", - "destination_type": "R" - }, - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "Other" - }, - "destination": null, - "uuid": "6006a206-10f0-4937-a33f-7ec80deb8540" - } - ], - "webhook": null, - "ruleset_type": "wait_menu", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 0, - "x": 624, - "config": { - "ussd_menu": [ - { - "category": { - "base": "Sports" - }, - "uuid": "337e5e25-204b-4786-bee6-ff4c431986eb", - "option": 1, - "label": "I'm interested in sports" - }, - { - "category": { - "base": "Politics" - }, - "uuid": "45803c40-aaf3-44d3-a301-f7eb35fa6be4", - "option": 2, - "label": "I'm interested in politics" - }, - { - "category": { - "base": "Movies" - }, - "uuid": "13f3ed00-44d0-4119-b5fd-269c8f09fce3", - "option": 3, - "label": "I'm 
interested in movies" - } - ], - "ussd_message": { - "base": "What would you like to read about?" - } - } - }, - { - "uuid": "66aa0bb5-d1e5-4026-a056-fd22c353539e", - "webhook_action": null, - "rules": [ - { - "test": { - "test": "true", - "type": "true" - }, - "category": { - "base": "All Responses" - }, - "uuid": "0df9b0ac-d241-460c-9b7c-f9f350a661bf" - } - ], - "webhook": null, - "ruleset_type": "wait_ussd", - "label": "Response 2", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 310, - "x": 725, - "config": { - "ussd_menu": [ - { - "category": { - "base": "Dfd" - }, - "uuid": "03f39461-b649-4cb0-97f9-9ce6ecb5c606", - "option": 1, - "label": "dfdf" - }, - { - "category": {}, - "uuid": "8211b1dc-b443-4b1c-8849-bf18a69e13ef", - "option": 2, - "label": "" - } - ], - "ussd_message": { - "base": "Thank you!" - } - } - } - ], - "metadata": { - "name": "USSD example", - "notes": [], - "expires": 10080, - "revision": 37, - "id": 26, - "saved_on": "2016-02-17T16:17:48.396242Z" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/webhook_rule_first.json b/media/test_flows/webhook_rule_first.json deleted file mode 100644 index 6be5a7e436b..00000000000 --- a/media/test_flows/webhook_rule_first.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "version": 7, - "flows": [ - { - "version": 7, - "base_language": "eng", - "action_sets": [ - { - "y": 140, - "x": 190, - "destination": null, - "uuid": "c81d60ec-9a74-48d6-a55f-e70a5d7195d3", - "actions": [ - { - "msg": { - "eng": "Testing this out" - }, - "type": "reply" - } - ] - } - ], - "last_saved": "2015-07-29T20:57:32.146036Z", - "entry": "9b3b6b7d-13ec-46ea-8918-a83a4099be33", - "rule_sets": [ - { - "uuid": "9b3b6b7d-13ec-46ea-8918-a83a4099be33", - "webhook_action": "GET", - "rules": [ - { - "category": { - "base": "All Responses", - "eng": "All Responses" - }, - "uuid": "0734d69a-99c1-45f6-a3df-7246408c4565", - "destination": "c81d60ec-9a74-48d6-a55f-e70a5d7195d3", - "destination_type": "A", - "test": { - "test": "true", - "type": "true" - }, - "config": { - "type": "true", - "verbose_name": "contains anything", - "name": "Other", - "operands": 0 - } - } - ], - "webhook": "http://google.com", - "ruleset_type": "webhook", - "label": "Response 1", - "operand": "@step.value", - "finished_key": null, - "response_type": "", - "y": 0, - "x": 100 - } - ], - "flow_type": "F", - "metadata": { - "expires": 10080, - "id": 33868, - "name": "Test Webhook First" - } - } - ], - "triggers": [] -} \ No newline at end of file diff --git a/media/test_flows/with_message_topic.json b/media/test_flows/with_message_topic.json deleted file mode 100644 index b99e4a0aafa..00000000000 --- a/media/test_flows/with_message_topic.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "version": "13", - "flows": [ - { - "uuid": "c07d368e-fe1d-469d-b267-bca02d17c3e3", - "name": "Topic Flow", - "spec_version": "13.0.0", - "language": "eng", - "type": "messaging", - "revision": 1, - "expire_after_minutes": 10080, - "localization": {}, - "nodes": [ - { - "uuid": "5bf98a89-d38d-40ba-8e5e-d54cad1777f1", - "actions": [ - { - "attachments": [], - "text": "This is a message with a topic.", - "type": "send_msg", - "quick_replies": [], - "uuid": "b103d563-9320-40fe-8f96-c9d3fbe0262d", - "templating": null, - "topic": "agent" - } - ], - "exits": [ - { - "uuid": "29dd720b-00ed-4304-97b0-38c65bc38e30" - } - ] - } - ], - "_ui": { - "nodes": { - "5bf98a89-d38d-40ba-8e5e-d54cad1777f1": { - "position": { - "left": 20, - "top": 40 - }, - "type": 
"execute_actions" - } - } - } - } - ] -} diff --git a/media/test_imports/formula_data.xlsx b/media/test_imports/formula_data.xlsx new file mode 100644 index 00000000000..ab0e53e892a Binary files /dev/null and b/media/test_imports/formula_data.xlsx differ diff --git a/media/test_imports/invalid.txt.xlsx b/media/test_imports/invalid.txt.xlsx new file mode 100644 index 00000000000..0a757197bf8 --- /dev/null +++ b/media/test_imports/invalid.txt.xlsx @@ -0,0 +1 @@ +text file \ No newline at end of file diff --git a/package.json b/package.json index 7297b7919a4..94debcd37d1 100644 --- a/package.json +++ b/package.json @@ -9,8 +9,8 @@ ] }, "dependencies": { - "@nyaruka/flow-editor": "1.35.1", - "@nyaruka/temba-components": "0.104.1", + "@nyaruka/flow-editor": "1.37.1", + "@nyaruka/temba-components": "0.113.0", "codemirror": "5.18.2", "colorette": "1.2.2", "fa-icons": "0.2.0", diff --git a/poetry.lock b/poetry.lock index cc2f118e255..c92870bf7b1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,140 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.3" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, +] + +[[package]] +name = "aiohttp" +version = "3.11.8" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "aiohttp-3.11.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d2ca685c6a851ce64e511fbcb906e4dd97d13e567ca7ecb5cb30b184e15dc6d"}, + {file = "aiohttp-3.11.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52913bb8a0a72a57479f54b281300c9d23036aa9aa3ebbc9a32a643484eadfc2"}, + {file = "aiohttp-3.11.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:35dafc70051b6cbd6dafb533b4e3f0df6225a4896be373ef86367b2987409331"}, + {file = "aiohttp-3.11.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:561b9596a9f90266673ef0b950c27e04ab597cdb53785e2ac91b83b33c31b509"}, + {file = "aiohttp-3.11.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d479c1fdcc920056a06d04059db52eb8590ecbbb3acdcaeeea26a88ff782e94a"}, + {file = "aiohttp-3.11.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ce8eb6444bb6e862feca664ce365afa8e2e32db24dcf1a502719a8a002f9274"}, + {file = "aiohttp-3.11.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df9bf08eb93611b1d4d6245b6fecf88728e90eece00e00d554e1b0c445557d83"}, + {file = "aiohttp-3.11.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a20ddaa58fea717177fac9a4a1fb8b39be868aa4fed2af6de4313b7a08f0f71"}, + {file = "aiohttp-3.11.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9f4aadfea6b48cfa17aef1a68ba6bee5a0246374f5a588e299a4f4ff5bd1c77b"}, + {file = "aiohttp-3.11.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:aa7deebb4bc5143745e6282139d7b9de50beb6d06609df64d2c993ef496bc7eb"}, + {file = "aiohttp-3.11.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fe503a76b9e3a13b62e64545693c9463afe9d429e0909120f7bb66de91ed8bc2"}, + {file = "aiohttp-3.11.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1c5838a68e31712354129add1b5fe32b06aa05275f835130edc650e6288af05f"}, + {file = "aiohttp-3.11.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:832e58d9454fe501b0d092cdf660c0e34e16005f61acd06e1c79b0fc45019c94"}, + {file = "aiohttp-3.11.8-cp310-cp310-win32.whl", hash = "sha256:00618c37a350884c08e87cf9a6532be274d564227ac49e0b474cf41f27e1f190"}, + {file = "aiohttp-3.11.8-cp310-cp310-win_amd64.whl", hash = "sha256:8eeaac75203da1a54afe1faea3c855a1973026b54929112aa9b67bceadbcb0ca"}, + {file = "aiohttp-3.11.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8dd02b44555893adfe7cc4b3b454fee04f9dcec45cf66ef5bb53ebf393f0505"}, + {file = "aiohttp-3.11.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658052941324edea3dee1f681375e70779f55e437e07bdfc4b5bbe65ad53cefb"}, + {file = "aiohttp-3.11.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c829471a9e2266da4a0666f8a9e215f19320f79778af379c1c7db324ac24ed2"}, + {file = "aiohttp-3.11.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d21951756690f5d86d0215da38eb0fd65def03b5e2a1c08a4a39718a6d0d48f2"}, + {file = "aiohttp-3.11.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2fa50ddc6b21cc1ae23e13524d6f75b27e279fdf5cf905b2df6fd171891ac4e2"}, + {file = "aiohttp-3.11.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5afbd805e449048ecebb1a256176e953d4ca9e48bab387d4d1c8524f1c7a95"}, + {file = "aiohttp-3.11.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea68db69f2a4ddc24b28b8e754fc0b963ed7f9b9a76137f06fe44643d6821fbd"}, + {file = "aiohttp-3.11.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b3ac163145660ce660aed2f1005e6d4de840d39728990b7250525eeec4e4a8"}, + {file = "aiohttp-3.11.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e9ac0cce897904b77e109e5403ed713187dbdf96832bfd061ac07164264be16c"}, + {file = "aiohttp-3.11.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3260c77cff4e35245bc517658bd54d7a64787f71f3c4f723877c82f22835b032"}, + {file = "aiohttp-3.11.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f7fd9c11ffad6b022bf02a41a70418cb2ab3b33f2c27842a5999e3ab78daf280"}, + {file = "aiohttp-3.11.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16bda233a7b159ab08107e8858fedca90a9de287057fab54cafde51bd83f9819"}, + {file = "aiohttp-3.11.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4867008617bbf86e9fb5b00f72dd0e3a00a579b32233caff834320867f9b7cac"}, + {file = "aiohttp-3.11.8-cp311-cp311-win32.whl", hash = "sha256:17e6b9d8e29e3bfc7f893f327e92c9769d3582cee2fb1652c1431ac3f60115a0"}, + {file = "aiohttp-3.11.8-cp311-cp311-win_amd64.whl", hash = "sha256:7f3be4961a5c2c670f31caab7641a37ea2a97031f0d8ae15bcfd36b6bf273200"}, + {file = "aiohttp-3.11.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0e3b5bfef913d6be270c81976fbc0cbf66625cd92663bbb7e03b3adbd6aa4ac6"}, + {file = "aiohttp-3.11.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb51a81cb637b9a072c9cfae1839e35c6579638861eb3479eb5d6e6ce8bc6782"}, + {file = "aiohttp-3.11.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd2ca84e5f7a35f313a62eb7d6a50bac6760b60bafce34586750712731c0aeff"}, + {file = "aiohttp-3.11.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:47c6663df9446aa848b478413219600da4b54bc0409e1ac4bc80fb1a81501363"}, + {file = "aiohttp-3.11.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c665ed4b52256614858b20711bbbd2755b0e19ec86870f8ff1645acf9ae9e760"}, + {file = "aiohttp-3.11.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35d4545e7684da7a954ffc2dce495462cb16a902dffdebe98572408f6aaaee83"}, + {file = "aiohttp-3.11.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85be3899e6860dd2cd3f4370ded6708e939d00d5ec922a8eb328d114db605a47"}, + {file = "aiohttp-3.11.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ed9f1f2697713c48efc9ec483ad5d062e4aa91854f090a3eba0b19c002851d"}, + {file = "aiohttp-3.11.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c0dbae99737badf3f5e862088a118e28d3b36f03eb608a6382eddfd68178e05b"}, + {file = "aiohttp-3.11.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:beae08f900b2980af4353a0200eb162b39f276fd8a6e43079a540f83964671f4"}, + {file = "aiohttp-3.11.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d6f9e5fd1b3ecbaca3e04a15a02d1fa213248608caee99fd5bdddd4759959cf7"}, + {file = "aiohttp-3.11.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7def89a41fe32120d89cd4577f5efbab3c52234c5890066ced8a2f7202dff88"}, + {file = "aiohttp-3.11.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:98f596cf59292e779bc387f22378a3d2c5e052c9fe2bf822ac4f547c6fe57758"}, + {file = "aiohttp-3.11.8-cp312-cp312-win32.whl", hash = "sha256:b64fa6b76b35b695cd3e5c42a4e568cbea8d41c9e59165e2a43da00976e2027e"}, + {file = "aiohttp-3.11.8-cp312-cp312-win_amd64.whl", hash = "sha256:afba47981ff73b1794c00dce774334dcfe62664b3b4f78f278b77d21ce9daf43"}, + {file = "aiohttp-3.11.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a81525430da5ca356fae6e889daeb6f5cc0d5f0cef88e59cdde48e2394ea1365"}, + {file = "aiohttp-3.11.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7565689e86a88c1d258351ebd14e343337b76a56ca5c0a2c1db96ec28149386f"}, + {file = "aiohttp-3.11.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d0f9dbe9763c014c408ad51a027dc9582518e992dc63e2ffe359ac1b4840a560"}, + {file = "aiohttp-3.11.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca580edc3ccd7f6ea76ad9cf59f5a8756d338e770b5eda7be26bcda8fa7ef53"}, + {file = "aiohttp-3.11.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d141631a7348038fc7b5d1a81b3c9afa9aa056188ded7902fe754028fdea5c5"}, + {file = "aiohttp-3.11.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64e6b14608a56a4c76c60daac730b0c0eeaf9d10dfc3231f7fc26521a0d628fd"}, + {file = "aiohttp-3.11.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0983d0ce329f2f9dbeb355c3744bd6333f34e0dc56025b6b7d4f285b90acb51e"}, + {file = "aiohttp-3.11.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d96b93a46a3742880fa21bcb35c6c40cf27714ec0fb8ec85fe444d73b95131b9"}, + {file = "aiohttp-3.11.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f4f1779c3142d913c509c2ed1de8b8f920e07a5cd65ac1f57c61cfb6bfded5a4"}, + {file = "aiohttp-3.11.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:48be7cff468c9c0d86a02e6a826e1fe159094b16d5aa2c17703e7317f791b0f9"}, + {file = "aiohttp-3.11.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:daea456b79ca2bacc7f062845bbb1139c3b3231fc83169da5a682cf385416dd1"}, + {file = "aiohttp-3.11.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c92e763cf641e10ad9342597d20060ba23de5e411aada96660e679e3f9371189"}, + {file = "aiohttp-3.11.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a750ee5a177e0f873d6b2d7d0fa6e1e7c658fc0ca8ea56438dcba2ac94bedb09"}, + {file = "aiohttp-3.11.8-cp313-cp313-win32.whl", hash = "sha256:4448c9c7f77bad48a6569062c0c16deb77fbb7363de1dc71ed087f66fb3b3c96"}, + {file = "aiohttp-3.11.8-cp313-cp313-win_amd64.whl", hash = "sha256:481075a1949de79a8a6841e0086f2f5f464785c592cf527ed0db2c0cbd0e1ba2"}, + {file = "aiohttp-3.11.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:72779bfb34d6d6b51e55a7f4901b410e416b5431738b367d49696928c91a2ca8"}, + {file = "aiohttp-3.11.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e6523f39071a01757048985e4cc22d04aa130bc40d9128503f3a61a3ee98328"}, + {file = "aiohttp-3.11.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:220bbce18b3046973465be45415430f1cab39d7fdc40cbcf0a8c05485c6902fe"}, + {file = "aiohttp-3.11.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:336bbf7a33dd8cb4a7afb98c70e9935a81e5e88f7ac595ba2e84b1fb5da190d6"}, + {file = "aiohttp-3.11.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c5e4f1ba5059b85e05c551961a448ce2689c6249ed6a2e2174796842c191d10"}, + {file = "aiohttp-3.11.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9f9fd5c672c962389429abd11ed32c9c93f7932fd58584cae1e43951b141c6b"}, + {file = "aiohttp-3.11.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58bd94ad48143e1d42e05fc055da41de0a9933f378ad87760595b8aec83d317b"}, + {file = "aiohttp-3.11.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bf52642b12d70d78c18882915201bc5345f7c8f0f2ab8919d99b886aa6475a7"}, + {file = "aiohttp-3.11.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fee12d8487b0df2b683424cca2a0d8fb7281d5607518d742e98119a74af01026"}, + {file = "aiohttp-3.11.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:65fd04f1fea668ad1af48ac31b752000e222dccffedcad3de8ccf9d34489ccd3"}, + {file = "aiohttp-3.11.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c3f397e0511a0ec4fe331e602fc057dfd336d352062deb9969ebd81e253a149c"}, + {file = "aiohttp-3.11.8-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cf8f05f4abe3288fe2e106e1461fd20d8abf6103886ddfb6d746a5b8fb830d2b"}, + {file = "aiohttp-3.11.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7d71d4ac0792ff89541179394d303be846a0b6cd3821ae67286ee69ecec16f9f"}, + {file = "aiohttp-3.11.8-cp39-cp39-win32.whl", hash = "sha256:2b6f8716044ae5e5f2a3b4e4b6bfee48e97c8b2a92e56f43aadd728c7fd26b7d"}, + {file = "aiohttp-3.11.8-cp39-cp39-win_amd64.whl", hash = "sha256:da343903214bf9f9d314b913caa499fa19e26d73e6e23a3db7d4898ea6d47028"}, + {file = "aiohttp-3.11.8.tar.gz", hash = "sha256:7bc9d64a2350cbb29a9732334e1a0743cbb6844de1731cbdf5949b235653f3fd"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] + +[[package]] +name = "aiohttp-retry" +version = "2.8.3" +description = "Simple retry client for aiohttp" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiohttp_retry-2.8.3-py3-none-any.whl", 
hash = "sha256:3aeeead8f6afe48272db93ced9440cf4eda8b6fd7ee2abb25357b7eb28525b45"}, + {file = "aiohttp_retry-2.8.3.tar.gz", hash = "sha256:9a8e637e31682ad36e1ff9f8bcba912fcfc7d7041722bc901a4b948da4d71ea9"}, +] + +[package.dependencies] +aiohttp = "*" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" [[package]] name = "amqp" @@ -14,6 +150,17 @@ files = [ [package.dependencies] vine = ">=5.0.0,<6.0.0" +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "asgiref" version = "3.8.1" @@ -29,16 +176,24 @@ files = [ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + [[package]] name = "billiard" version = "4.2.0" @@ -96,17 +251,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.143" +version = "1.35.54" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.143-py3-none-any.whl", hash = "sha256:0d16832f23e6bd3ae94e35ea8e625529850bfad9baccd426de96ad8f445d8e03"}, - {file = "boto3-1.34.143.tar.gz", hash = "sha256:b590ce80c65149194def43ebf0ea1cf0533945502507837389a8d22e3ecbcf05"}, + {file = "boto3-1.35.54-py3-none-any.whl", hash = 
"sha256:2d5e160b614db55fbee7981001c54476cb827c441cef65b2fcb2c52a62019909"}, + {file = "boto3-1.35.54.tar.gz", hash = "sha256:7d9c359bbbc858a60b51c86328db813353c8bd1940212cdbd0a7da835291c2e1"}, ] [package.dependencies] -botocore = ">=1.34.143,<1.35.0" +botocore = ">=1.35.54,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -115,13 +270,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.143" +version = "1.35.54" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.143-py3-none-any.whl", hash = "sha256:094aea179e8aaa1bc957ad49cc27d93b189dd3a1f3075d8b0ca7c445a2a88430"}, - {file = "botocore-1.34.143.tar.gz", hash = "sha256:059f032ec05733a836e04e869c5a15534420102f93116f3bc9a5b759b0651caf"}, + {file = "botocore-1.35.54-py3-none-any.whl", hash = "sha256:9cca1811094b6cdc144c2c063a3ec2db6d7c88194b04d4277cd34fc8e3473aff"}, + {file = "botocore-1.35.54.tar.gz", hash = "sha256:131bb59ce59c8a939b31e8e647242d70cf11d32d4529fa4dca01feea1e891a76"}, ] [package.dependencies] @@ -130,7 +285,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.20.11)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "cachetools" @@ -524,43 +679,38 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = 
"cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -573,7 +723,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -593,20 +743,20 @@ six = ">=1.13.0" [[package]] name = "deprecated" -version = "1.2.14" +version = "1.2.15" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] [[package]] name = "dj-database-url" @@ -621,13 +771,13 @@ files = [ [[package]] name = "django" -version = "5.1" +version = "5.1.4" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.1-py3-none-any.whl", hash = "sha256:d3b811bf5371a26def053d7ee42a9df1267ef7622323fe70a601936725aa4557"}, - {file = "Django-5.1.tar.gz", hash = "sha256:848a5980e8efb76eea70872fb0e4bc5e371619c70fffbe48e3e1b50b2c09455d"}, + {file = "Django-5.1.4-py3-none-any.whl", hash = "sha256:236e023f021f5ce7dee5779de7b286565fdea5f4ab86bae5338e3f7b69896cf0"}, + {file = "Django-5.1.4.tar.gz", hash = "sha256:de450c09e91879fa5a307f696e57c851955c910a438a35e6b4c895e86bedc82a"}, ] [package.dependencies] @@ -887,6 +1037,107 @@ future = "*" [package.extras] dev = ["Sphinx (==2.1.0)", "future (==0.17.1)", "numpy (==1.16.4)", "pytest (==4.6.1)", "pytest-mock (==1.10.4)", "tox (==3.12.1)"] +[[package]] +name = "frozenlist" +version = "1.5.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = 
"frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = 
"frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = 
"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, +] + [[package]] name = "future" version = "1.0.0" @@ -1119,6 +1370,107 @@ files = [ docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] +[[package]] +name = "multidict" +version = "6.1.0" +description = "multidict implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = 
"multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = 
"multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1130,22 +1482,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - [[package]] name = "openpyxl" version = "3.1.5" @@ -1184,13 +1520,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.40" +version = "8.13.49" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.40-py2.py3-none-any.whl", hash = "sha256:9582752c20a1da5ec4449f7f97542bf8a793c8e2fec0ab57f767177bb8fc0b1d"}, - {file = "phonenumbers-8.13.40.tar.gz", hash = "sha256:f137c2848b8e83dd064b71881b65680584417efa202177fd330e2f7ff6c68113"}, + {file = "phonenumbers-8.13.49-py2.py3-none-any.whl", hash = "sha256:e17140955ab3d8f9580727372ea64c5ada5327932d6021ef6fd203c3db8c8139"}, + {file = "phonenumbers-8.13.49.tar.gz", hash = "sha256:e608ccb61f0bd42e6db1d2c421f7c22186b88f494870bf40aa31d1a2718ab0ae"}, ] [[package]] @@ -1345,6 +1681,113 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.2.0" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.8" +files = [ + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, + {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, + {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, + {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, + {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, + {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, + {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, + {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = 
"sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, + {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, + {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, + {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, + {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, + {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, + {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, + {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, +] + [[package]] name = "psycopg" version = "3.2.1" @@ -1357,6 +1800,7 @@ files = [ ] [package.dependencies] +psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} typing-extensions = ">=4.4" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -1368,6 +1812,20 @@ docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)" pool = ["psycopg-pool"] test = ["anyio (>=4.0)", "mypy (>=1.6)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +[[package]] +name = "psycopg-pool" +version = "3.2.2" +description = "Connection Pool for Psycopg" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_pool-3.2.2-py3-none-any.whl", hash = "sha256:273081d0fbfaced4f35e69200c89cb8fbddfe277c38cc86c235b90a2ec2c8153"}, + {file = "psycopg_pool-3.2.2.tar.gz", hash = "sha256:9e22c370045f6d7f2666a5ad1b0caf345f9f1912195b0b25d0d3bcc4f3a7389c"}, +] + +[package.dependencies] +typing-extensions = ">=4.4" + [[package]] name = "pyasn1" version = "0.6.0" @@ -1417,15 +1875,147 @@ files = [ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +[[package]] +name = "pydantic" +version = "2.10.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, + {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.1" 
+typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = 
"pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.dependencies] @@ -1433,8 +2023,8 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography 
(>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -1448,18 +2038,6 @@ files = [ {file = "pyotp-2.4.1.tar.gz", hash = "sha256:038a3f70b34eaad3f72459e8b411662ef8dfcdd95f7d9203fa489e987a75584b"}, ] -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] - [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1614,21 +2192,18 @@ files = [ [[package]] name = "redis" -version = "5.0.7" +version = "5.2.0" description = "Python client for Redis database and key-value store" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-5.0.7-py3-none-any.whl", hash = "sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db"}, - {file = "redis-5.0.7.tar.gz", hash = "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b"}, + {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, + {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, ] -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} - [package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "regex" @@ -1753,24 +2328,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - [[package]] name = "requests-toolbelt" version = "1.0.0" @@ -1961,13 +2518,13 @@ testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "We [[package]] name = "smartmin" -version = "5.1.0" +version = "5.1.1" description = "Scaffolding system for Django object management." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "smartmin-5.1.0-py3-none-any.whl", hash = "sha256:6d7ac21129f2e66e22a55240faad69e2bcbf845afefe7bff09dae51c2ae98eec"}, - {file = "smartmin-5.1.0.tar.gz", hash = "sha256:9cb24d58ca677e837a0e2cead546c4e7ce1b74900a902319878e4418eec67e50"}, + {file = "smartmin-5.1.1-py3-none-any.whl", hash = "sha256:bdb5f34e83aeee5d9fe924efd2e79aa694e11765ecc372580c79fdc89ec9cb00"}, + {file = "smartmin-5.1.1.tar.gz", hash = "sha256:0e67a3c35982b4ca95cb640e5aae6d6f926c3cf1b4648ea13f6dc46152b80ae7"}, ] [package.dependencies] @@ -2015,35 +2572,20 @@ telegram = ["requests"] [[package]] name = "twilio" -version = "6.24.0" +version = "9.3.7" description = "Twilio API client and TwiML generator" optional = false -python-versions = "*" -files = [ - {file = "twilio-6.24.0-py2.py3-none-any.whl", hash = "sha256:68323becb99a9b960c9a0b92164288bceba4886ca0230daf4f591286bb224916"}, - {file = "twilio-6.24.0.tar.gz", hash = "sha256:cff699b660dcfb0f1e225a37b1586cd0ebcae634c3af91cd7a07b787807d744a"}, -] - -[package.dependencies] -PyJWT = ">=1.4.2" -pysocks = {version = "*", markers = "python_version >= \"3.0\""} -pytz = "*" -requests = {version = ">=2.0.0", markers = "python_version >= \"3.0\""} -six = "*" - -[[package]] -name = "twython" -version = "3.5.0" -description = "Actively maintained, pure Python wrapper for the Twitter API. Supports both normal and streaming Twitter APIs" -optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "twython-3.5.0.tar.gz", hash = "sha256:01ad5ed21f3da641a3dc58a814b435aac4cf91c691b1afc5a8dc0ca4e1affe47"}, + {file = "twilio-9.3.7-py2.py3-none-any.whl", hash = "sha256:7d5d05140530f0eaf60d6a810c88da443cb2e6aad18a0830e4cb0ccd7b338d30"}, + {file = "twilio-9.3.7.tar.gz", hash = "sha256:0f747f6c29b0ddc50a55e51739abb28c83b83d97917b02e784119058a310db05"}, ] [package.dependencies] -requests = ">=2.1.0" -requests_oauthlib = ">=0.4.0" +aiohttp = ">=3.8.4" +aiohttp-retry = "2.8.3" +PyJWT = ">=2.0.0,<3.0.0" +requests = ">=2.0.0" [[package]] name = "typing-extensions" @@ -2097,20 +2639,50 @@ files = [ [[package]] name = "vonage" -version = "2.5.2" +version = "3.17.4" description = "Vonage Server SDK for Python" optional = false -python-versions = ">=3, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "vonage-2.5.2-py2.py3-none-any.whl", hash = "sha256:fb63528e04650a7712226075dab7c99dcd8e6a6aeb90d2feaa385cb84b686054"}, - {file = "vonage-2.5.2.tar.gz", hash = "sha256:da9f9ebc14de1778b745bbfab5558f4f4b13706d0e3d624d70245b605588a11f"}, + {file = "vonage-3.17.4-py2.py3-none-any.whl", hash = "sha256:4aa7b624e1f7741fcf77cbc287f276e8ee099583a7fbf52c6b275460bc10801e"}, + {file = "vonage-3.17.4.tar.gz", hash = "sha256:be0fe06c808afef1e2f4f8e4adb1894f9e0a0e73e0e2d20ffe24869f020f62f4"}, ] [package.dependencies] Deprecated = "*" -PyJWT = {version = ">=1.6.4", extras = ["crypto"]} +pydantic = ">=2.5.2" pytz = ">=2018.5" -requests = ">=2.4.2" +requests = ">=2.32.2" +vonage-jwt = ">=1.1.4" + +[[package]] +name = "vonage-jwt" +version = "1.1.4" +description = "Tooling for working with JWTs for Vonage APIs in Python." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "vonage_jwt-1.1.4-py3-none-any.whl", hash = "sha256:aae3c98c588d3f383ef6984920c2c9948737f834049bf7bb688b087f04b77584"}, + {file = "vonage_jwt-1.1.4.tar.gz", hash = "sha256:dc9aadc683318696c6d426f0abc05295166884fecf9970a7800ea14a99fae71e"}, +] + +[package.dependencies] +pyjwt = {version = ">=1.6.4", extras = ["crypto"]} +vonage-utils = ">=1.1.4" + +[[package]] +name = "vonage-utils" +version = "1.1.4" +description = "Utils package containing objects for use with Vonage APIs" +optional = false +python-versions = ">=3.9" +files = [ + {file = "vonage_utils-1.1.4-py3-none-any.whl", hash = "sha256:438b0bc8da25e8026ec6789cd9a113b4f41636c70c4d84b2d9ab0e92ed23a665"}, + {file = "vonage_utils-1.1.4.tar.gz", hash = "sha256:950b802dc93f8440717fba6e183d2ae39f6e0316dc19a3f1f41ca81f9af24bf8"}, +] + +[package.dependencies] +pydantic = ">=2.9.2" [[package]] name = "wcwidth" @@ -2125,81 +2697,76 @@ files = [ [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, 
+ {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, + {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, + {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, + {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, + {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, + {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, + {file 
= "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, + {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, + {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, + {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, + {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, + {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = 
"wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] @@ -2235,7 +2802,103 @@ files = [ {file = "xlwt-1.3.0.tar.gz", hash = "sha256:c59912717a9b28f1a3c2a98fd60741014b06b043936dcecbc113eaaada156c88"}, ] +[[package]] +name = "yarl" +version = "1.18.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +files = [ + {file = "yarl-1.18.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:074fee89caab89a97e18ef5f29060ef61ba3cae6cd77673acc54bfdd3214b7b7"}, + {file = "yarl-1.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b026cf2c32daf48d90c0c4e406815c3f8f4cfe0c6dfccb094a9add1ff6a0e41a"}, + {file = "yarl-1.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ae38bd86eae3ba3d2ce5636cc9e23c80c9db2e9cb557e40b98153ed102b5a736"}, + {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:685cc37f3f307c6a8e879986c6d85328f4c637f002e219f50e2ef66f7e062c1d"}, + {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8254dbfce84ee5d1e81051ee7a0f1536c108ba294c0fdb5933476398df0654f3"}, + {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20de4a8b04de70c49698dc2390b7fd2d18d424d3b876371f9b775e2b462d4b41"}, + {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0a2074a37285570d54b55820687de3d2f2b9ecf1b714e482e48c9e7c0402038"}, + {file = "yarl-1.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f576ed278860df2721a5d57da3381040176ef1d07def9688a385c8330db61a1"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3a3709450a574d61be6ac53d582496014342ea34876af8dc17cc16da32826c9a"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:bd80ed29761490c622edde5dd70537ca8c992c2952eb62ed46984f8eff66d6e8"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:32141e13a1d5a48525e519c9197d3f4d9744d818d5c7d6547524cc9eccc8971e"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8b8d3e4e014fb4274f1c5bf61511d2199e263909fb0b8bda2a7428b0894e8dc6"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:701bb4a8f4de191c8c0cc9a1e6d5142f4df880e9d1210e333b829ca9425570ed"}, + {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a45d94075ac0647621eaaf693c8751813a3eccac455d423f473ffed38c8ac5c9"}, + {file = "yarl-1.18.0-cp310-cp310-win32.whl", hash = "sha256:34176bfb082add67cb2a20abd85854165540891147f88b687a5ed0dc225750a0"}, + {file = 
"yarl-1.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:73553bbeea7d6ec88c08ad8027f4e992798f0abc459361bf06641c71972794dc"}, + {file = "yarl-1.18.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e8c516dc4e1a51d86ac975b0350735007e554c962281c432eaa5822aa9765c"}, + {file = "yarl-1.18.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e6b4466714a73f5251d84b471475850954f1fa6acce4d3f404da1d55d644c34"}, + {file = "yarl-1.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c893f8c1a6d48b25961e00922724732d00b39de8bb0b451307482dc87bddcd74"}, + {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13aaf2bdbc8c86ddce48626b15f4987f22e80d898818d735b20bd58f17292ee8"}, + {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd21c0128e301851de51bc607b0a6da50e82dc34e9601f4b508d08cc89ee7929"}, + {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205de377bd23365cd85562c9c6c33844050a93661640fda38e0567d2826b50df"}, + {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed69af4fe2a0949b1ea1d012bf065c77b4c7822bad4737f17807af2adb15a73c"}, + {file = "yarl-1.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e1c18890091aa3cc8a77967943476b729dc2016f4cfe11e45d89b12519d4a93"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91b8fb9427e33f83ca2ba9501221ffaac1ecf0407f758c4d2f283c523da185ee"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:536a7a8a53b75b2e98ff96edb2dfb91a26b81c4fed82782035767db5a465be46"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a64619a9c47c25582190af38e9eb382279ad42e1f06034f14d794670796016c0"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c73a6bbc97ba1b5a0c3c992ae93d721c395bdbb120492759b94cc1ac71bc6350"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a173401d7821a2a81c7b47d4e7d5c4021375a1441af0c58611c1957445055056"}, + {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7520e799b1f84e095cce919bd6c23c9d49472deeef25fe1ef960b04cca51c3fc"}, + {file = "yarl-1.18.0-cp311-cp311-win32.whl", hash = "sha256:c4cb992d8090d5ae5f7afa6754d7211c578be0c45f54d3d94f7781c495d56716"}, + {file = "yarl-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:52c136f348605974c9b1c878addd6b7a60e3bf2245833e370862009b86fa4689"}, + {file = "yarl-1.18.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1ece25e2251c28bab737bdf0519c88189b3dd9492dc086a1d77336d940c28ced"}, + {file = "yarl-1.18.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:454902dc1830d935c90b5b53c863ba2a98dcde0fbaa31ca2ed1ad33b2a7171c6"}, + {file = "yarl-1.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01be8688fc211dc237e628fcc209dda412d35de7642453059a0553747018d075"}, + {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d26f1fa9fa2167bb238f6f4b20218eb4e88dd3ef21bb8f97439fa6b5313e30d"}, + {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b234a4a9248a9f000b7a5dfe84b8cb6210ee5120ae70eb72a4dcbdb4c528f72f"}, + {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe94d1de77c4cd8caff1bd5480e22342dbd54c93929f5943495d9c1e8abe9f42"}, + {file = 
"yarl-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4c90c5363c6b0a54188122b61edb919c2cd1119684999d08cd5e538813a28e"}, + {file = "yarl-1.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a98ecadc5a241c9ba06de08127ee4796e1009555efd791bac514207862b43d"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9106025c7f261f9f5144f9aa7681d43867eed06349a7cfb297a1bc804de2f0d1"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f275ede6199d0f1ed4ea5d55a7b7573ccd40d97aee7808559e1298fe6efc8dbd"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f7edeb1dcc7f50a2c8e08b9dc13a413903b7817e72273f00878cb70e766bdb3b"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c083f6dd6951b86e484ebfc9c3524b49bcaa9c420cb4b2a78ef9f7a512bfcc85"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:80741ec5b471fbdfb997821b2842c59660a1c930ceb42f8a84ba8ca0f25a66aa"}, + {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1a3297b9cad594e1ff0c040d2881d7d3a74124a3c73e00c3c71526a1234a9f7"}, + {file = "yarl-1.18.0-cp312-cp312-win32.whl", hash = "sha256:cd6ab7d6776c186f544f893b45ee0c883542b35e8a493db74665d2e594d3ca75"}, + {file = "yarl-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:039c299a0864d1f43c3e31570045635034ea7021db41bf4842693a72aca8df3a"}, + {file = "yarl-1.18.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6fb64dd45453225f57d82c4764818d7a205ee31ce193e9f0086e493916bd4f72"}, + {file = "yarl-1.18.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3adaaf9c6b1b4fc258584f4443f24d775a2086aee82d1387e48a8b4f3d6aecf6"}, + {file = "yarl-1.18.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:da206d1ec78438a563c5429ab808a2b23ad7bc025c8adbf08540dde202be37d5"}, + {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:576d258b21c1db4c6449b1c572c75d03f16a482eb380be8003682bdbe7db2f28"}, + {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c60e547c0a375c4bfcdd60eef82e7e0e8698bf84c239d715f5c1278a73050393"}, + {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3818eabaefb90adeb5e0f62f047310079d426387991106d4fbf3519eec7d90a"}, + {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5f72421246c21af6a92fbc8c13b6d4c5427dfd949049b937c3b731f2f9076bd"}, + {file = "yarl-1.18.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fa7d37f2ada0f42e0723632993ed422f2a679af0e200874d9d861720a54f53e"}, + {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:42ba84e2ac26a3f252715f8ec17e6fdc0cbf95b9617c5367579fafcd7fba50eb"}, + {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6a49ad0102c0f0ba839628d0bf45973c86ce7b590cdedf7540d5b1833ddc6f00"}, + {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96404e8d5e1bbe36bdaa84ef89dc36f0e75939e060ca5cd45451aba01db02902"}, + {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a0509475d714df8f6d498935b3f307cd122c4ca76f7d426c7e1bb791bcd87eda"}, + {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1ff116f0285b5c8b3b9a2680aeca29a858b3b9e0402fc79fd850b32c2bcb9f8b"}, + {file = 
"yarl-1.18.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2580c1d7e66e6d29d6e11855e3b1c6381971e0edd9a5066e6c14d79bc8967af"}, + {file = "yarl-1.18.0-cp313-cp313-win32.whl", hash = "sha256:14408cc4d34e202caba7b5ac9cc84700e3421a9e2d1b157d744d101b061a4a88"}, + {file = "yarl-1.18.0-cp313-cp313-win_amd64.whl", hash = "sha256:1db1537e9cb846eb0ff206eac667f627794be8b71368c1ab3207ec7b6f8c5afc"}, + {file = "yarl-1.18.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fa2c9cb607e0f660d48c54a63de7a9b36fef62f6b8bd50ff592ce1137e73ac7d"}, + {file = "yarl-1.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0f4808644baf0a434a3442df5e0bedf8d05208f0719cedcd499e168b23bfdc4"}, + {file = "yarl-1.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7db9584235895a1dffca17e1c634b13870852094f6389b68dcc6338086aa7b08"}, + {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:309f8d27d6f93ceeeb80aa6980e883aa57895270f7f41842b92247e65d7aeddf"}, + {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:609ffd44fed2ed88d9b4ef62ee860cf86446cf066333ad4ce4123505b819e581"}, + {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f172b8b2c72a13a06ea49225a9c47079549036ad1b34afa12d5491b881f5b993"}, + {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89ae7de94631b60d468412c18290d358a9d805182373d804ec839978b120422"}, + {file = "yarl-1.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:466d31fd043ef9af822ee3f1df8fdff4e8c199a7f4012c2642006af240eade17"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7609b8462351c4836b3edce4201acb6dd46187b207c589b30a87ffd1813b48dc"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d9d4f5e471e8dc49b593a80766c2328257e405f943c56a3dc985c125732bc4cf"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:67b336c15e564d76869c9a21316f90edf546809a5796a083b8f57c845056bc01"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b212452b80cae26cb767aa045b051740e464c5129b7bd739c58fbb7deb339e7b"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:38b39b7b3e692b6c92b986b00137a3891eddb66311b229d1940dcbd4f025083c"}, + {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7ee6884a8848792d58b854946b685521f41d8871afa65e0d4a774954e9c9e89"}, + {file = "yarl-1.18.0-cp39-cp39-win32.whl", hash = "sha256:b4095c5019bb889aa866bf12ed4c85c0daea5aafcb7c20d1519f02a1e738f07f"}, + {file = "yarl-1.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:2d90f2e4d16a5b0915ee065218b435d2ef619dd228973b1b47d262a6f7cd8fa5"}, + {file = "yarl-1.18.0-py3-none-any.whl", hash = "sha256:dbf53db46f7cf176ee01d8d98c39381440776fcda13779d269a8ba664f69bec0"}, + {file = "yarl-1.18.0.tar.gz", hash = "sha256:20d95535e7d833889982bfe7cc321b7f63bf8879788fee982c76ae2b24cfb715"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.0" + [metadata] lock-version = "2.0" -python-versions = "~3.11" -content-hash = "1e5eca221256ae4e280d282c9442ab9d6cf06f03700b08e05474ab4bec0c704b" +python-versions = "~3.12" +content-hash = "31456d76456d4581109b1efe85700b7dddec64a099fba76ead8454168bca9a4c" diff --git a/pyproject.toml b/pyproject.toml index f277de383fe..ba484b72fea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "temba" 
-version = "9.3.21" +version = "9.3.139" description = "Hosted service for visually building interactive messaging applications" authors = ["Nyaruka "] @@ -8,8 +8,8 @@ authors = ["Nyaruka "] repository = "http://github.com/rapidpro/rapidpro" [tool.poetry.dependencies] -python = "~3.11" -Django = "^5.1" +python = "~3.12" +Django = "^5.1.4" django-compressor = "^4.3.1" django-countries = "^7.0" django-mptt = "^0.16.0" @@ -18,15 +18,14 @@ django-storages = "^1.11.1" django-timezone-field = "^6.1.0" djangorestframework = "^3.15.1" dj-database-url = "^0.5.0" -smartmin = "^5.1.0" +smartmin = "5.1.1" celery = "^5.4.0" -redis = "^5.0.7" -boto3 = "^1.34.137" -cryptography = "^42.0.4" -vonage = "2.5.2" +redis = "^5.2.0" +boto3 = "^1.35.54" +cryptography = "^43.0.3" +vonage = "3.17.4" pyotp = "2.4.1" -twilio = "6.24.0" -twython = "3.5.0" +twilio = "9.3.7" geojson = "^2.5.0" Markdown = "^3.3.4" polib = "^1.1.0" @@ -36,7 +35,7 @@ colorama = "^0.4.6" gunicorn = "^22.0.0" iptools = "^0.7.0" iso8601 = "^0.1.14" -phonenumbers = "^8.13.40" +phonenumbers = "^8.13.49" pycountry = "^22.3.5" python-dateutil = "^2.9.0" packaging = "^22.0" @@ -46,7 +45,7 @@ openpyxl = "^3.1.5" ffmpeg-python = "^0.2.0" slack-sdk = "3.17.0" django-formtools = "^2.4.1" -psycopg = "^3.1.9" +psycopg = { extras = ["pool"], version = "^3.2.1" } pillow = "^10.1.0" django-imagekit = "^5.0.0" iso639-lang = "^2.2.3" @@ -54,7 +53,7 @@ google-auth = "^2.30.0" [tool.poetry.group.dev.dependencies] black = "^24.4.2" -coverage = {extras = ["toml"], version = "^7.5.4"} +coverage = { extras = ["toml"], version = "^7.5.4" } isort = "^5.13.2" responses = "^0.25.3" ruff = "^0.5.0" @@ -79,13 +78,20 @@ force_grid_wrap = 0 line_length = 120 include_trailing_comma = true combine_as_imports = true -sections = ["FUTURE", "STDLIB", "THIRDPARTY", "DJANGO", "FIRSTPARTY", "LOCALFOLDER"] +sections = [ + "FUTURE", + "STDLIB", + "THIRDPARTY", + "DJANGO", + "FIRSTPARTY", + "LOCALFOLDER", +] known_django = ["django"] [tool.djlint] blank_line_after_tag = "load" custom_blocks = "render" -custom_html="temba-[a-z-]+" +custom_html = "temba-[a-z-]+" format_css = true format_js = true ignore = "H021,H023" diff --git a/static/brands/rapidpro/less/variables.less b/static/brands/rapidpro/less/variables.less index 607b84d9e1d..da1eebddeb5 100644 --- a/static/brands/rapidpro/less/variables.less +++ b/static/brands/rapidpro/less/variables.less @@ -1,5 +1,5 @@ // RapidPro blue -@color-primary: #0c6596; +@color-primary: #294E8E; @color-secondary: #f3f3f3; @color-links: @color-primary; diff --git a/static/css/frame.css b/static/css/frame.css index acc20dd4e46..acd8f63cb0c 100644 --- a/static/css/frame.css +++ b/static/css/frame.css @@ -4,6 +4,7 @@ html { font-size: var(--font-size); font-family: var(--font-family); height: 100%; + line-height: normal !important; } html.dragging * { @@ -29,7 +30,7 @@ body { } .attachment .attachment-preview { - background-color: #EAEAEA; + background-color: #eaeaea; border: 0; color: #777; display: flex; @@ -41,7 +42,7 @@ body { } .attachment .attachment-download { - background-color: #E0E0E0; + background-color: #e0e0e0; border: 0; color: #666; display: inline-block; @@ -60,7 +61,7 @@ body { display: none; } -temba-menu>div { +temba-menu > div { display: none; } @@ -74,14 +75,15 @@ temba-dialog { } temba-button { - --button-shadow: 0 0px 0px 1px rgba(0, 0, 0, 0.02), 0 1px 9px 0 rgba(0, 0, 0, 0.2); + --button-shadow: 0 0px 0px 1px rgba(0, 0, 0, 0.02), + 0 1px 9px 0 rgba(0, 0, 0, 0.2); } temba-button.light { - --button-shadow: rgba(0, 0, 0, 0.05) 0px 
3px 7px 0px, rgba(0, 0, 0, 0.07) 0px 1px 1px 1px; + --button-shadow: rgba(0, 0, 0, 0.05) 0px 3px 7px 0px, + rgba(0, 0, 0, 0.07) 0px 1px 1px 1px; } - temba-button { --button-bg: var(--color-primary-dark); --button-text: var(--color-text-light); @@ -90,11 +92,16 @@ temba-button { } temba-button:hover { - --button-bg-img: linear-gradient(to bottom, rgba(var(--primary-rgb), .1), transparent, transparent); + --button-bg-img: linear-gradient( + to bottom, + rgba(var(--primary-rgb), 0.1), + transparent, + transparent + ); } temba-button.active { - --button-bg-img: linear-gradient(to bottom, transparent, rgba(0, 0, 0, .05)); + --button-bg-img: linear-gradient(to bottom, transparent, rgba(0, 0, 0, 0.05)); } temba-button.light { @@ -105,11 +112,15 @@ temba-button.light { } temba-button.light:hover { - --button-bg-img: linear-gradient(to bottom, transparent, rgba(0, 0, 0, .001)); + --button-bg-img: linear-gradient( + to bottom, + transparent, + rgba(0, 0, 0, 0.001) + ); } temba-button.light.active { - --button-bg-img: linear-gradient(to bottom, transparent, rgba(0, 0, 0, .02)); + --button-bg-img: linear-gradient(to bottom, transparent, rgba(0, 0, 0, 0.02)); } temba-select:focus { @@ -138,16 +149,17 @@ temba-menu:defined { margin-left: 1em; } -.list-buttons-container.visible {} +.list-buttons-container.visible { +} .spa-container { background: #f7f7f7; overflow-y: clip; - overflow-x: visible; + overflow-x: auto; } .spa-container.loading .spa-content { - opacity: .3; + opacity: 0.3; pointer-events: none; } @@ -159,14 +171,14 @@ temba-menu:defined { .widget-container.loading .folders, .widget-container.loading .spa-content, .widget-container.loading .org-chooser { - opacity: .3; + opacity: 0.3; pointer-events: none; } .org-chooser { - background: rgba(0, 0, 0, .02); - box-shadow: 0px 0px 15px 0px rgba(0, 0, 0, .1) inset; - color: rgba(0, 0, 0, .6); + background: rgba(0, 0, 0, 0.02); + box-shadow: 0px 0px 15px 0px rgba(0, 0, 0, 0.1) inset; + color: rgba(0, 0, 0, 0.6); --color-widget-border: transparent; --color-widget-bg: transparent; --temba-select-selected-padding: 0em; @@ -196,7 +208,10 @@ temba-loading { .bg-gradient { background-repeat: no-repeat; - background-image: linear-gradient(rgb(255, 255, 255) 0%, rgb(236, 236, 236) 75%); + background-image: linear-gradient( + rgb(255, 255, 255) 0%, + rgb(236, 236, 236) 75% + ); } .lp-frame .lp-nav-item { @@ -241,7 +256,7 @@ temba-loading { } .object-list { - -webkit-transform: translate3d(0, 0, 0) + -webkit-transform: translate3d(0, 0, 0); } .button-action { @@ -256,7 +271,6 @@ temba-loading { color: #fff !important; } - #gear-container .button-light { padding-top: 0.62em; padding-bottom: 0.62em; @@ -292,7 +306,7 @@ temba-menu { temba-menu.servicing { --primary-rgb: 191, 84, 155; --color-primary-dark: rgb(var(--primary-rgb)); - --color-selection: rgba(var(--primary-rgb), .05); + --color-selection: rgba(var(--primary-rgb), 0.05); } .formax .formax-section.open { @@ -301,7 +315,7 @@ temba-menu.servicing { } .servicing:hover .hover { - background: rgba(0, 0, 0, .2); + background: rgba(0, 0, 0, 0.2); } .spa-content { @@ -328,4 +342,8 @@ temba-menu.servicing { border-color: var(--color-focus); background: var(--color-widget-bg-focused); box-shadow: var(--widget-box-shadow-focused); -} \ No newline at end of file +} + +#error-dialog { + display: none; +} diff --git a/static/css/tailwind.css b/static/css/tailwind.css index 12b8e308df1..07dbf6299e5 100644 --- a/static/css/tailwind.css +++ b/static/css/tailwind.css @@ -563,11 +563,11 @@ video { } .trans-border { - 
box-shadow: 0 0px 0px 4px rgba(0, 0, 0, .1); + box-shadow: 0 0px 0px 4px rgba(0, 0, 0, 0.1); } .bg-dark-alpha { - background: rgba(0, 0, 0, .2); + background: rgba(0, 0, 0, 0.2); } .text-tertiary { @@ -579,7 +579,7 @@ video { } .trans-lined-box { - box-shadow: 0 0px 0px 4px rgba(0, 0, 0, .1); + box-shadow: 0 0px 0px 4px rgba(0, 0, 0, 0.1); max-height: 0; --tw-bg-opacity: 1; background-color: rgba(255, 255, 255, var(--tw-bg-opacity)); @@ -592,7 +592,7 @@ video { transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); transition-duration: 150ms; transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); - transition-duration: 200ms + transition-duration: 200ms; } .font-primary { @@ -697,17 +697,13 @@ a:hover { } .button:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-light { @@ -735,17 +731,13 @@ a:hover { } .button-light:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-light:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-light { @@ -757,9 +749,7 @@ a:hover { .button-light:hover { cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-action { @@ -787,17 +777,13 @@ a:hover { } .button-action:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-action:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-action { @@ -809,9 +795,7 @@ a:hover { .button-action:hover { cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-action { @@ -851,17 +835,13 @@ a:hover { } .button-tertiary:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-tertiary:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-tertiary { @@ -895,17 +875,13 @@ a:hover { } .button-secondary:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-secondary:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-secondary { @@ -939,17 +915,13 @@ a:hover { } 
.button-primary:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-primary:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-primary { @@ -983,17 +955,13 @@ a:hover { } .button-white:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-white:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-white { @@ -1027,17 +995,13 @@ a:hover { } .button-danger:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-danger:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-danger { @@ -1071,17 +1035,13 @@ a:hover { } .button-dark-alpha:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-dark-alpha:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-dark-alpha { @@ -1115,17 +1075,13 @@ a:hover { } .button-sm:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-sm:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } .button-sm { @@ -1148,7 +1104,7 @@ a:hover { box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); transform: none; cursor: pointer; - box-shadow: inset 0 0 20px 10px rgba(255, 255, 255, .2); + box-shadow: inset 0 0 20px 10px rgba(255, 255, 255, 0.2); } .lift { @@ -1186,7 +1142,8 @@ a:hover { cursor: pointer; color: rgba(0, 0, 0, .5); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .3); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.3); } .lbl.linked:active { @@ -1219,7 +1176,8 @@ a:hover { cursor: pointer; color: rgba(0, 0, 0, .5); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .3); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.3); } .lbl-primary.linked:active { @@ -1235,7 +1193,8 @@ a:hover { cursor: pointer; color: rgba(255, 255, 255, .9); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 
20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } .lbl-primary { @@ -1247,7 +1206,8 @@ a:hover { --tw-text-opacity: 1; color: rgba(255, 255, 255, var(--tw-text-opacity)); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .1); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.1); } .lbl-primary.linked:active { @@ -1294,7 +1254,8 @@ a:hover { cursor: pointer; color: rgba(0, 0, 0, .5); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .3); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.3); } .lbl-group.linked:active { @@ -1310,7 +1271,8 @@ a:hover { cursor: pointer; color: rgba(255, 255, 255, .9); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } .lbl-group { @@ -1319,7 +1281,7 @@ a:hover { } .lbl-group temba-icon { - --icon-color: rgba(0,0,0,.5); + --icon-color: rgba(0, 0, 0, 0.5); margin-right: 3px; } @@ -1331,7 +1293,8 @@ a:hover { cursor: pointer; color: rgba(255, 255, 255, .9); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } .lbl-group.inverted { @@ -1342,11 +1305,12 @@ a:hover { cursor: pointer; color: rgba(255, 255, 255, .9); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } .lbl-group.inverted temba-icon { - --icon-color: rgba(255, 255, 255, .9); + --icon-color: rgba(255, 255, 255, 0.9); } .lbl-secondary { @@ -1374,7 +1338,8 @@ a:hover { cursor: pointer; color: rgba(0, 0, 0, .5); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .3); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.3); } .lbl-secondary.linked:active { @@ -1390,7 +1355,8 @@ a:hover { cursor: pointer; color: rgba(255, 255, 255, .9); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } .lbl-secondary { @@ -1407,7 +1373,8 @@ a:hover { --tw-text-opacity: 1; color: rgba(255, 255, 255, var(--tw-text-opacity)); text-decoration: none; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .1); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.1); } .alert { @@ -1451,7 +1418,7 @@ a:hover { .alert-danger, .alert-error { - background: rgba(255, 181, 181, .17); + 
background: rgba(255, 181, 181, 0.17); } .max-h-128 { @@ -1470,11 +1437,11 @@ a:hover { box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); color: rgba(255, 255, 255, .9); color: rgba(var(--error-rgb, 1)); - background: rgba(255, 181, 181, .17); + background: rgba(255, 181, 181, 0.17); } .alert-text .title { - font-weight: 400 + font-weight: 400; } .cap-label { @@ -1505,41 +1472,41 @@ a:hover { font-size: 0.75rem; text-align: left; text-transform: uppercase; - letter-spacing: 0.05em + letter-spacing: 0.05em; } .page-title { font-size: 1.875rem; --tw-text-opacity: 1; color: rgba(74, 74, 74, var(--tw-text-opacity)); - --icon-color: rgb(77,77,77); + --icon-color: rgb(77, 77, 77); } .page-subtitle { font-size: 1.5rem; --tw-text-opacity: 1; - color: rgba(113, 113, 113, var(--tw-text-opacity)) + color: rgba(113, 113, 113, var(--tw-text-opacity)); } .title { font-size: 1.5rem; margin-bottom: 0.25rem; --tw-text-opacity: 1; - color: rgba(74, 74, 74, var(--tw-text-opacity)) + color: rgba(74, 74, 74, var(--tw-text-opacity)); } .subtitle { font-size: 1.25rem; margin-bottom: 0.25rem; --tw-text-opacity: 1; - color: rgba(74, 74, 74, var(--tw-text-opacity)) + color: rgba(74, 74, 74, var(--tw-text-opacity)); } table.padded td { - padding: 0.75rem + padding: 0.75rem; } -table.list{ +table.list { --tw-bg-opacity: 1; background-color: rgba(255, 255, 255, var(--tw-bg-opacity)); border-radius: 0.5rem; @@ -1562,7 +1529,7 @@ table.list.scrolled { } table.list tr:first-child td { - border-top:none; + border-top: none; } table.list tr.checked td temba-checkbox, table.list tr.checked td:hover temba-checkbox { @@ -1598,8 +1565,8 @@ table.list.toggle thead tr:hover th { table.list.toggle thead tr th:last-child::after { transition: all 200ms ease-in-out; - content: "\e05c"; - color: rgba(0, 0, 0, .3); + content: '\e05c'; + color: rgba(0, 0, 0, 0.3); font-size: 12px; float: right; margin: -3px 0px; @@ -1614,7 +1581,7 @@ table.list.toggle tbody tr td { border: 0px; } -table.list.toggle tbody tr td>* { +table.list.toggle tbody tr td > * { max-height: 0px; } @@ -1634,7 +1601,7 @@ table.list.expanded tbody tr td { padding: 14px; } -table.list.expanded tbody tr td>* { +table.list.expanded tbody tr td > * { max-height: 500px; } @@ -1662,11 +1629,13 @@ table.list.lined td { } table.list tbody tr.warning { - background: repeating-linear-gradient(-55deg, - rgba(207, 127, 127, 0.06), - rgba(207, 127, 127, 0.06) 5px, - rgba(207, 127, 127, 0.08) 5px, - rgba(207, 127, 127, 0.08) 10px); + background: repeating-linear-gradient( + -55deg, + rgba(207, 127, 127, 0.06), + rgba(207, 127, 127, 0.06) 5px, + rgba(207, 127, 127, 0.08) 5px, + rgba(207, 127, 127, 0.08) 10px + ); border-color: rgba(207, 127, 127, 0.35); } @@ -1715,7 +1684,7 @@ table.list.selectable tbody tr td { } table.list.selectable tbody tr.hovered td { - background: rgba(var(--selection-light-rgb), .4); + background: rgba(var(--selection-light-rgb), 0.4); cursor: pointer; } @@ -1724,7 +1693,7 @@ table.list.selectable tbody td:hover temba-checkbox { } table.list.selectable tbody tr.checked td { - background: rgba(var(--selection-light-rgb), .4); + background: rgba(var(--selection-light-rgb), 0.4); border-color: rgba(var(--selection-light-rgb), 1); } @@ -1832,7 +1801,7 @@ table.header { } .lp-frame .right { - flex-grow: 1 + flex-grow: 1; } .action-buttons .linked, .gear-menu .linked { @@ -1879,6 +1848,17 @@ code { color: #2980b9; } +ol.steps, ul.steps { + padding-left: 1.5rem; + padding-right: 1.5rem; +} + +ol.steps, 
+ul.steps { + line-height: 2; + list-style-type: decimal; +} + .card { --tw-bg-opacity: 1; background-color: rgba(255, 255, 255, var(--tw-bg-opacity)); @@ -1983,7 +1963,11 @@ code { border-width: 1px; --tw-shadow: 0 0 #0000; box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); - background-color: rgba(0,0,0,.015); + } + +.formax .formax-section.action-summary, + .formax .formax-section.action-link { + background-color: rgba(0, 0, 0, 0.015); } .formax .formax-section.action-summary .formax-summary, .formax .formax-section.action-link .formax-summary { @@ -2035,7 +2019,7 @@ code { .formax .formax-section .formax-container { display: grid; grid-template-rows: min-content 0fr; - transition: grid-template-rows .5s; + transition: grid-template-rows 0.5s; transition-timing-function: cubic-bezier(0.175, 0.885, 0.32, 1.275); } @@ -2075,7 +2059,7 @@ code { .formax .formax-section.wide.open .formax-icon .i-container { width: 0px; margin-top: 20px; - margin-left:-25px; + margin-left: -25px; } .formax .formax-section.open { @@ -2092,8 +2076,8 @@ code { } .formax .formax-section.open .formax-icon { - padding:.5em 1em !important; - flex-basis:0 !important; + padding: 0.5em 1em !important; + flex-basis: 0 !important; } .formax .formax-section.open .formax-icon .margin-wrapper { @@ -2116,7 +2100,7 @@ code { .formax .formax-section.open .formax-summary { height: 0px; - padding:0px; + padding: 0px; } .control-group { @@ -2124,7 +2108,7 @@ code { } .text-shadow { - text-shadow: 0 2px 4px rgba(0, 0, 0, 0.10); + text-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); } .text-shadow-md { @@ -15743,7 +15727,7 @@ code { } .md\:text-shadow { - text-shadow: 0 2px 4px rgba(0, 0, 0, 0.10); + text-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); } .md\:text-shadow-md { @@ -29205,4 +29189,4 @@ code { .md\:animate-bounce { animation: bounce 1s infinite; } -} \ No newline at end of file +} diff --git a/static/css/temba-components.css b/static/css/temba-components.css index 2a88df524d8..b7ad9ba71a8 100644 --- a/static/css/temba-components.css +++ b/static/css/temba-components.css @@ -29,7 +29,7 @@ html { --color-borders: rgba(0, 0, 0, 0.07); --color-placeholder: #ccc; --color-primary-light: #eee; - --color-secondary-light: rgba(var(--secondary-rgb), .3); + --color-secondary-light: rgba(var(--secondary-rgb), 0.3); --color-primary-dark: rgb(var(--primary-rgb)); --color-secondary-dark: rgb(var(--secondary-rgb)); --color-focus: #a4cafe; @@ -38,7 +38,7 @@ html { --color-widget-border: rgb(225, 225, 225); --color-options-bg: var(--color-widget-bg); --color-selection: #f0f6ff; - --color-row-hover: rgba(var(--selection-light-rgb), .4); + --color-row-hover: rgba(var(--selection-light-rgb), 0.4); --color-available: #00f100; --color-tertiary: rgb(var(--tertiary-rgb)); --color-text-light: rgba(255, 255, 255, 1); @@ -59,7 +59,7 @@ html { --color-button-secondary-text: var(--color-text-dark); --color-button-destructive: rgb(var(--error-rgb)); --color-button-destructive-text: var(--color-text-light); - --color-button-attention: #2ecc71; + --color-button-attention: #3ca96a; --color-label-primary: var(--color-primary-dark); --color-label-primary-text: var(--color-text-light); --color-label-secondary: var(--color-secondary-dark); @@ -77,27 +77,31 @@ html { --color-alert: rgb(var(--error-rgb)); --icon-color: var(--text-color); --icon-color-circle: rgb(240, 240, 240); - --icon-color-circle-hover: rgba(245, 245, 245, .8); + --icon-color-circle-hover: rgba(245, 245, 245, 0.8); --header-bg: var(--color-primary-dark); 
--header-text: var(--color-text-light); --color-text-help: rgb(120, 120, 120); --color-automated: rgb(78, 205, 106); - /* Shadows */ - --widget-box-shadow: rgba(-1, -1, 0, .1) 0px 1px 7px 0px, rgba(0, 0, 0, 0.05) 0px 1px 2px 0px; - --widget-box-shadow-focused: 0 0 0 3px rgba(164, 202, 254, .45), rgba(0, 0, 0, 0.05) 0px 3px 7px 0px, rgba(0, 0, 0, 0.05) 0px 1px 2px 0px; + --widget-box-shadow: rgba(-1, -1, 0, 0.1) 0px 1px 7px 0px, + rgba(0, 0, 0, 0.05) 0px 1px 2px 0px; + --widget-box-shadow-focused: 0 0 0 3px rgba(164, 202, 254, 0.45), + rgba(0, 0, 0, 0.05) 0px 3px 7px 0px, rgba(0, 0, 0, 0.05) 0px 1px 2px 0px; --widget-box-shadow-focused-error: 0 0 0 3px rgba(var(--error-rgb), 0.3); --shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); - --shadow-widget: 0 3px 20px 0 rgba(0, 0, 0, 0.04), 0 1px 2px 0 rgba(0, 0, 0, 0.02); + --shadow-widget: 0 3px 20px 0 rgba(0, 0, 0, 0.04), + 0 1px 2px 0 rgba(0, 0, 0, 0.02); /* temba-select */ --select-input-height: inherit; --temba-select-selected-font-size: 1em; - --temba-select-selected-padding: .6em .8em; + --temba-select-selected-padding: 0.6em 0.8em; --temba-select-selected-line-height: 1.2em; - --options-block-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.03); - --options-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05); + --options-block-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), + 0 1px 2px 0 rgba(0, 0, 0, 0.03); + --options-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), + 0 4px 6px -2px rgba(0, 0, 0, 0.05); --dropdown-shadow: rgb(0 0 0 / 15%) 0px 0px 30px, rgb(0 0 0 / 12%) 0px 2px 6px; /* buttons */ @@ -106,7 +110,7 @@ html { --button-x: 1.5em; /* textinput */ - --temba-textinput-padding: 0.6em .8em; + --temba-textinput-padding: 0.6em 0.8em; --temba-textinput-font-size: 1; /* charcount */ @@ -126,6 +130,4 @@ html { --event-padding: 0.5em 1em; --control-margin-bottom: 15px; --menu-padding: 1em; - - -} \ No newline at end of file +} diff --git a/static/images/logo-dark.svg b/static/images/logo-dark.svg index c853153f0b9..b5e4dc3b6e9 100644 --- a/static/images/logo-dark.svg +++ b/static/images/logo-dark.svg @@ -1 +1,25 @@ - \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/static/js/formax.js b/static/js/formax.js index 114dbf7934b..b627af89a80 100644 --- a/static/js/formax.js +++ b/static/js/formax.js @@ -43,13 +43,12 @@ window.fetchData = function (section) { var url; const headers = { - 'X-FORMAX': true, - 'X-PJAX': true, - 'X-FORMAX-ACTION': section.dataset.action, + 'X-Formax': true, + 'X-Formax-Action': section.dataset.action }; if (section.closest('.spa-container')) { - headers['TEMBA-SPA'] = 1; + headers['X-Temba-Spa'] = 1; } if (section.dataset.href) { @@ -59,7 +58,7 @@ window.fetchData = function (section) { const options = { headers: headers, method: 'GET', - container: id, + container: id }; return fetchAjax(url, options).then(function () { @@ -127,13 +126,12 @@ var _submitFormax = function (e) { const followRedirects = section.dataset.action === 'redirect'; const headers = { - 'X-FORMAX': true, - 'X-PJAX': true, - 'X-FORMAX-ACTION': section.dataset.action, + 'X-Formax': true, + 'X-Formax-Action': section.dataset.action }; if (section.closest('.spa-container')) { - headers['TEMBA-SPA'] = 1; + headers['X-Temba-Spa'] = 1; } var formData = new FormData(form); @@ -142,7 +140,7 @@ var _submitFormax = function (e) { headers: headers, method: 'POST', body: formData, - container: id, + container: id }; if 
(followRedirects) { @@ -151,7 +149,7 @@ var _submitFormax = function (e) { fetchAjax(section.dataset.href, options) .then(function (resp) { - const redirect = resp.headers.get('REDIRECT'); + const redirect = resp.headers.get('X-Formax-Redirect'); if (redirect) { if (section.dataset.action === 'redirect') { return spaGet(redirect); diff --git a/static/js/frame.js b/static/js/frame.js index f787fd2c52d..aa55e5a225f 100644 --- a/static/js/frame.js +++ b/static/js/frame.js @@ -1,5 +1,7 @@ var pendingRequests = []; +const OMIT_ORG_URLS = ['/staff/', '/org/choose/']; + function onSpload(fn) { var container = document.querySelector('.spa-container'); if (!container) { @@ -170,6 +172,7 @@ function spaPost(url, options) { const requestOptions = { ignoreEvents: false, ignoreHistory: false, + fullPage: options.fullPage || false, headers: options.headers || {} }; @@ -199,12 +202,20 @@ function spaRequest(url, options) { const ignoreHistory = options.ignoreHistory || false; const body = options.body || null; const headers = options.headers || {}; + const fullPage = options.fullPage || false; + + headers['X-Temba-Referrer-Path'] = refererPath; + headers['X-Temba-Path'] = url; - headers['TEMBA-REFERER-PATH'] = refererPath; - headers['TEMBA-PATH'] = url; + // don't include temba org for service changes + const omitted = !!OMIT_ORG_URLS.find((omitUrl) => { + if (url.includes(omitUrl)) { + return true; + } + }); - if (!ignoreHistory) { - addToHistory(url); + if (!omitted && window.org_id) { + headers['X-Temba-Org'] = window.org_id; } const ajaxOptions = { @@ -212,7 +223,8 @@ function spaRequest(url, options) { headers, ignoreEvents: ignoreEvents, cancel: true, - showErrors: !!options.showErrors + showErrors: !!options.showErrors, + ignoreHistory }; if (body) { @@ -220,10 +232,10 @@ function spaRequest(url, options) { ajaxOptions.body = body; } - return fetchAjax(url, ajaxOptions).then(hideLoading); + return fetchAjax(url, ajaxOptions, fullPage).then(hideLoading); } -function fetchAjax(url, options) { +function fetchAjax(url, options, fullPage = false) { // create our default options options = options || {}; @@ -248,14 +260,15 @@ function fetchAjax(url, options) { options['headers']['X-CSRFToken'] = csrf; } - options['headers']['TEMBA-SPA'] = 1; - options['headers']['X-PJAX'] = 1; + if (!fullPage) { + options['headers']['X-Temba-Spa'] = 1; + } let container = options['container'] || null; - // reroute any pjax requests made from spa pages and push the content there instead - if (container == '#pjax' && document.querySelector('.spa-content')) { - container = '.spa-content'; + // we don't track history for interior requests + if (container != '.spa-content') { + options['ignoreHistory'] = true; } var controller = new AbortController(); @@ -265,7 +278,16 @@ function fetchAjax(url, options) { return fetch(toFetch, options) .then(function (response) { - const toasts = response.headers.get('x-temba-toasts'); + if (response.status >= 400) { + showErrorDialog(); + return; + } + + if (!options.ignoreHistory) { + addToHistory(url); + } + + const toasts = response.headers.get('X-Temba-Toasts'); if (toasts) { const toastEle = document.querySelector('temba-toast'); if (toastEle) { @@ -279,8 +301,8 @@ function fetchAjax(url, options) { }); // if we have a version mismatch, reload the page - var version = response.headers.get('x-temba-version'); - var org = response.headers.get('x-temba-org'); + var version = response.headers.get('X-Temba-Version'); + var org = response.headers.get('X-Temba-Org'); if (response.type !== 
'cors' && org && org != org_id) { if (response.redirected) { @@ -318,13 +340,18 @@ function fetchAjax(url, options) { // special case for spa content, break out into a full page load if ( container === '.spa-content' && - response.headers.get('x-temba-content-only') != 1 + response.headers.get('X-Temba-Content-Only') != 1 ) { document.location.href = response.url; return; } return response.text().then(function (body) { + if (body.startsWith('')) { + document.location.href = response.url; + return; + } + var containerEle = document.querySelector(container); if (containerEle) { // special care to unmount the editor @@ -376,12 +403,6 @@ function handleMenuClicked(event) { return; } - if (!item.popup && selection.length > 1 && selection[0] == 'ticket') { - if (window.handleTicketsMenuChanged) { - handleTicketsMenuChanged(item); - } - } - // posterize if called for if (item.href && item.posterize) { posterize(item.href); @@ -444,7 +465,7 @@ function showModax(header, endpoint, modaxOptions) { modax['-temba-redirected'] = refreshMenu; } - modax.headers = { 'TEMBA-SPA': 1 }; + modax.headers = { 'X-Temba-Spa': 1 }; modax.header = header; modax.endpoint = endpoint; @@ -464,12 +485,6 @@ document.addEventListener('temba-redirected', function (event) { spaGet(event.detail.url, true); }); -document.addEventListener('temba-pjax-complete', function () { - refreshMenu(); - hideLoading(); - handleUpdateComplete(); -}); - function loadFromState(state) { if (state && state.url) { var url = state.url; @@ -527,7 +542,7 @@ document.addEventListener('DOMContentLoaded', function () { function posterize(href) { var url = new URL(href, window.location.origin); - spaPost(url.pathname, { queryString: url.searchParams }); + spaPost(url.pathname, { queryString: url.searchParams, fullPage: true }); } function handlePosterize(ele) { @@ -593,27 +608,26 @@ document.addEventListener('DOMContentLoaded', function () { var container = document.querySelector('.spa-container'); if (container) { container.classList.remove('initial-load'); - } + container.addEventListener('click', function (event) { + // get our immediate path + const path = event.composedPath().slice(0, 10); - container.addEventListener('click', function (event) { - // get our immediate path - const path = event.composedPath().slice(0, 10); + // find the first anchor tag + const ele = path.find((ele) => ele.tagName === 'A'); - // find the first anchor tag - const ele = path.find((ele) => ele.tagName === 'A'); - - if (ele) { - const url = new URL(ele.href); - event.preventDefault(); - event.stopPropagation(); + if (ele) { + const url = new URL(ele.href); + event.preventDefault(); + event.stopPropagation(); - // if we are working within the app, use spaGet - if (url.host === window.location.host && !event.metaKey) { - spaGet(ele.href); - } else { - // otherwise open a new tab - window.open(ele.href, '_blank'); + // if we are working within the app, use spaGet + if (url.host === window.location.host && !event.metaKey) { + spaGet(ele.href); + } else { + // otherwise open a new tab + window.open(ele.href, '_blank'); + } } - } - }); + }); + } }); diff --git a/static/js/labels.js b/static/js/labels.js index 6e52d6711c7..70070c4bdd5 100644 --- a/static/js/labels.js +++ b/static/js/labels.js @@ -40,7 +40,7 @@ function runActionOnObjectRows(action, options = {}) { } if (!options.add) { - formData.append('add', "false") + formData.append('add', 'false'); } for (var i = 0; i < objectIds.length; i++) { @@ -48,9 +48,8 @@ function runActionOnObjectRows(action, options = {}) { } 
formData.append('action', action); - formData.append('pjax', 'true'); return spaPost(document.location.href, { - postData: formData, + postData: formData }); } diff --git a/static/js/temba.js b/static/js/temba.js index 71cec6f3f78..184e6142c70 100644 --- a/static/js/temba.js +++ b/static/js/temba.js @@ -26,6 +26,14 @@ function showLightbox(evt, url) { } } +function showErrorDialog() { + var dialog = document.querySelector('#error-dialog'); + dialog.style = 'display:block'; + dialog.width = 'initial'; + dialog.buttons = [{ type: 'secondary', name: 'Ok', closes: true }]; + dialog.open = true; +} + function showPreview(evt, ele) { evt.stopPropagation(); evt.preventDefault(); @@ -129,9 +137,9 @@ function initializeVideoPlayer(element) { vjsdownload: { beforeElement: 'playbackRateMenuButton', textControl: 'Download', - name: 'downloadButton', - }, - }, + name: 'downloadButton' + } + } }); } @@ -168,7 +176,6 @@ function stopEvent(event) { event.preventDefault(); } - document.addEventListener('temba-refresh-complete', function () { wireTableListeners(); }); diff --git a/static/scss/tailwind.scss b/static/scss/tailwind.scss index 899f1f6c3e4..587f1ef2ee3 100644 --- a/static/scss/tailwind.scss +++ b/static/scss/tailwind.scss @@ -1,8 +1,6 @@ @tailwind base; @tailwind components; - - .bg-gradient { background-repeat: no-repeat; background-attachment: fixed; @@ -10,11 +8,11 @@ } .trans-border { - box-shadow: 0 0px 0px 4px rgba(0, 0, 0, .1); + box-shadow: 0 0px 0px 4px rgba(0, 0, 0, 0.1); } .bg-dark-alpha { - background: rgba(0, 0, 0, .2); + background: rgba(0, 0, 0, 0.2); } .text-tertiary { @@ -26,7 +24,7 @@ } .trans-lined-box { - @apply py-4 rounded-lg trans-border absolute bg-white text-primary max-h-0 transition-all duration-200 ease-in-out + @apply py-4 rounded-lg trans-border absolute bg-white text-primary max-h-0 transition-all duration-200 ease-in-out; } .font-primary { @@ -105,20 +103,14 @@ a { margin-top: 1px; &:active { - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } &:hover { text-decoration: none; cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), - + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } - } .button-light { @@ -126,9 +118,7 @@ a { &:hover { cursor: pointer; - box-shadow: - 0 1px 3px 0 rgba(0, 0, 0, 0.1), - 0 1px 2px 0 rgba(0, 0, 0, 0.06), + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06); } } @@ -172,7 +162,7 @@ a { @apply shadow; transform: none; cursor: pointer; - box-shadow: inset 0 0 20px 10px rgba(255, 255, 255, .2); + box-shadow: inset 0 0 20px 10px rgba(255, 255, 255, 0.2); } } @@ -189,7 +179,8 @@ a { &.linked:hover { @apply no-underline cursor-pointer text-dark-alpha-500; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .3); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.3); } &.linked:active { @@ -202,7 +193,8 @@ a { &.linked:hover { @apply no-underline cursor-pointer text-white; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .1); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.1); } &.linked:active { @@ -210,7 +202,6 @@ a { } } - .lbl-icon { @apply 
text-dark-alpha-300; font-size: 11px; @@ -230,7 +221,7 @@ a { display: inline-flex; align-items: center; temba-icon { - --icon-color: rgba(0,0,0,.5); + --icon-color: rgba(0, 0, 0, 0.5); margin-right: 3px; } } @@ -240,7 +231,8 @@ a { &.linked:hover { @apply no-underline cursor-pointer text-light-alpha-900; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } } @@ -249,11 +241,12 @@ a { &.linked:hover { @apply no-underline cursor-pointer text-light-alpha-900; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(0, 0, 0, .05); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(0, 0, 0, 0.05); } temba-icon { - --icon-color: rgba(255, 255, 255, .9); + --icon-color: rgba(255, 255, 255, 0.9); } } @@ -262,7 +255,8 @@ a { &.linked:hover { @apply no-underline bg-secondary text-white cursor-pointer; - box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), inset 0 0 20px 20px rgba(255, 255, 255, .1); + box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1), 0 1px 2px 0 rgba(0, 0, 0, 0.06), + inset 0 0 20px 20px rgba(255, 255, 255, 0.1); } } @@ -283,7 +277,7 @@ a { .alert-danger, .alert-error { @apply alert text-error; - background: rgba(255, 181, 181, .17); + background: rgba(255, 181, 181, 0.17); } .max-h-128 { @@ -299,10 +293,10 @@ a { @apply text-light-alpha-900 p-4 rounded shadow; color: rgba(var(--error-rgb, 1)); - background: rgba(255, 181, 181, .17); + background: rgba(255, 181, 181, 0.17); .title { - @apply font-normal + @apply font-normal; } } @@ -315,33 +309,33 @@ a { } .table-header { - @apply border-b border-gray-300 bg-gray-200 text-left font-medium uppercase text-xs tracking-wider + @apply border-b border-gray-300 bg-gray-200 text-left font-medium uppercase text-xs tracking-wider; } .page-title { @apply text-3xl text-gray-700; - --icon-color: rgb(77,77,77); + --icon-color: rgb(77, 77, 77); } .page-subtitle { - @apply text-2xl text-gray-600 + @apply text-2xl text-gray-600; } .title { - @apply text-2xl text-gray-700 mb-1 + @apply text-2xl text-gray-700 mb-1; } .subtitle { - @apply text-xl text-gray-700 mb-1 + @apply text-xl text-gray-700 mb-1; } table.padded { td { - @apply p-3 + @apply p-3; } } -table.list{ +table.list { @apply w-full shadow rounded-lg bg-white overflow-hidden; &.sticky { @@ -359,17 +353,17 @@ table.list{ tr:first-child { td { - border-top:none; + border-top: none; } } tr { &.checked { - td, td:hover { + td, + td:hover { temba-checkbox { --icon-color: #444 !important; } - } } @@ -383,7 +377,6 @@ table.list{ } } - &.light { thead { tr th { @@ -393,7 +386,6 @@ table.list{ } &.toggle { - thead { @apply cursor-pointer select-none; @@ -408,8 +400,8 @@ table.list{ &:last-child { &::after { transition: all 200ms ease-in-out; - content: "\e05c"; - color: rgba(0, 0, 0, .3); + content: '\e05c'; + color: rgba(0, 0, 0, 0.3); font-size: 12px; float: right; margin: -3px 0px; @@ -425,13 +417,12 @@ table.list{ border: 0px; } - tbody tr td>* { + tbody tr td > * { max-height: 0px; } } &.expanded { - &.lined { @apply border-solid border-gray-300 border-l-0 border-r-0; } @@ -448,10 +439,9 @@ table.list{ padding: 14px; } - tbody tr td>* { + tbody tr td > * { max-height: 500px; } - } th { @@ -465,15 +455,16 @@ table.list{ } tbody tr.warning { - background: repeating-linear-gradient(-55deg, - 
rgba(207, 127, 127, 0.06), - rgba(207, 127, 127, 0.06) 5px, - rgba(207, 127, 127, 0.08) 5px, - rgba(207, 127, 127, 0.08) 10px); + background: repeating-linear-gradient( + -55deg, + rgba(207, 127, 127, 0.06), + rgba(207, 127, 127, 0.06) 5px, + rgba(207, 127, 127, 0.08) 5px, + rgba(207, 127, 127, 0.08) 10px + ); border-color: rgba(207, 127, 127, 0.35); td { - .icon-docs-2, .icon-cloud-upload { @apply text-error; @@ -512,7 +503,6 @@ table.list{ } } - &.tight, &.tight.expanded { tbody { @@ -537,7 +527,6 @@ table.list{ } } - &.selectable { tbody { tr { @@ -548,7 +537,7 @@ table.list{ tr.hovered { td { - background: rgba(var(--selection-light-rgb), .4); + background: rgba(var(--selection-light-rgb), 0.4); @apply cursor-pointer; } } @@ -560,16 +549,11 @@ table.list{ } tr.checked { - - td { - - background: rgba(var(--selection-light-rgb), .4); + background: rgba(var(--selection-light-rgb), 0.4); border-color: rgba(var(--selection-light-rgb), 1); - } } - } } } @@ -611,7 +595,6 @@ table.header { @apply w-64 mr-5; .lp-nav { - &.upper { @apply p-3 pr-4 w-64 mt-2; } @@ -639,7 +622,7 @@ table.header { } .right { - @apply flex-grow + @apply flex-grow; } } @@ -650,7 +633,8 @@ table.header { } } -.warning {} +.warning { +} code { @apply px-2 py-1 rounded-lg text-base; @@ -659,7 +643,14 @@ code { .code { @apply bg-dark-alpha-30 rounded-lg px-1 py-1 leading-tight text-base font-mono inline-block mx-1 whitespace-nowrap; color: #2980b9; +} + +ol.steps, +ul.steps { + @apply px-6; + line-height: 2; + list-style-type: decimal; } .card { @@ -722,7 +713,6 @@ code { .formax { .formax-section { - margin-left: 0px; margin-right: 0px; @@ -732,10 +722,11 @@ code { box-shadow: var(--widget-box-shadow-focused); } - &.action-summary, &.action-link { + &.action-summary, + &.action-link { @apply border border-gray-400 shadow-none bg-transparent; - background-color: rgba(0,0,0,.015); + background-color: rgba(0, 0, 0, 0.015); .formax-summary { @apply py-6 !important; @@ -771,18 +762,17 @@ code { } .i-container { - width: 30px; - &:before {} + &:before { + } } - } .formax-container { display: grid; grid-template-rows: min-content 0fr; - transition: grid-template-rows .5s; + transition: grid-template-rows 0.5s; transition-timing-function: cubic-bezier(0.175, 0.885, 0.32, 1.275); .formax-summary { @@ -806,7 +796,6 @@ code { grid-template-rows: min-content 1fr; } - &.wide.open { margin-left: -115px; margin-right: -115px; @@ -819,11 +808,11 @@ code { .i-container { width: 0px; margin-top: 20px; - margin-left:-25px; + margin-left: -25px; } } } - + &.open { flex-direction: column; margin-left: -30px; @@ -835,15 +824,14 @@ code { } .formax-icon { - padding:.5em 1em !important; - flex-basis:0 !important; - + padding: 0.5em 1em !important; + flex-basis: 0 !important; + .margin-wrapper { margin: 0px !important; } .i-container { - } @apply bg-primary text-white p-12; @@ -855,7 +843,7 @@ code { .formax-summary { height: 0px; - padding:0px; + padding: 0px; } } } @@ -867,7 +855,7 @@ code { @responsive { .text-shadow { - text-shadow: 0 2px 4px rgba(0, 0, 0, 0.10); + text-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); } .text-shadow-md { @@ -884,7 +872,6 @@ code { } @keyframes bounceFromLeft { - from, 60%, 75%, @@ -917,7 +904,6 @@ code { } @keyframes bounceInUp { - from, 60%, 75%, @@ -970,7 +956,6 @@ code { .disabled { @apply text-gray-300; --icon-color: #ccc; - } .filter { @@ -979,10 +964,6 @@ code { temba-icon { @apply mr-1; } - - } - - -@tailwind utilities; \ No newline at end of file +@tailwind utilities; diff --git a/temba/__init__.py 
b/temba/__init__.py index 4610cc59fac..b4d53f1f168 100644 --- a/temba/__init__.py +++ b/temba/__init__.py @@ -1,4 +1,4 @@ -__version__ = "9.3.21" +__version__ = "9.3.139" # This will make sure the app is always imported when # Django starts so that shared_task will use this app. diff --git a/temba/airtime/migrations/0030_populate_uuid.py b/temba/airtime/migrations/0030_populate_uuid.py index 3ce1a38224b..10c0818c96a 100644 --- a/temba/airtime/migrations/0030_populate_uuid.py +++ b/temba/airtime/migrations/0030_populate_uuid.py @@ -5,7 +5,7 @@ from temba.utils.uuid import uuid4 -def populate_uuid(apps, schema_editor): +def populate_uuid(apps, schema_editor): # pragma: no cover AirtimeTransfer = apps.get_model("airtime", "AirtimeTransfer") tranfers = AirtimeTransfer.objects.filter(uuid=None) diff --git a/temba/contacts/management/__init__.py b/temba/airtime/tests/__init__.py similarity index 100% rename from temba/contacts/management/__init__.py rename to temba/airtime/tests/__init__.py diff --git a/temba/airtime/tests.py b/temba/airtime/tests/test_airtimecrudl.py similarity index 78% rename from temba/airtime/tests.py rename to temba/airtime/tests/test_airtimecrudl.py index cbb401f310b..aa5437ba260 100644 --- a/temba/airtime/tests.py +++ b/temba/airtime/tests/test_airtimecrudl.py @@ -1,7 +1,7 @@ from django.urls import reverse from temba.airtime.models import AirtimeTransfer -from temba.tests import CRUDLTestMixin, MigrationTest, TembaTest +from temba.tests import CRUDLTestMixin, TembaTest class AirtimeCRUDLTest(TembaTest, CRUDLTestMixin): @@ -72,28 +72,3 @@ def test_read(self): self.assertContains(response, "Ben Haggerty") self.assertNotContains(response, "+250 700 000 003") self.assertFalse(response.context["show_logs"]) - - -class AirtimeUUIDTest(MigrationTest): - app = "airtime" - migrate_from = "0029_airtimetransfer_external_id" - migrate_to = "0030_populate_uuid" - - def setUpBeforeMigration(self, apps): - contact = self.create_contact("Eric", phone="+250788382382") - self.airtime1 = AirtimeTransfer.objects.create( - uuid=None, - org=self.org, - status="S", - contact=contact, - recipient="tel:+250788382382", - desired_amount=1.00, - actual_amount=1.00, - ) - - self.assertIsNone(self.airtime1.uuid) - - def test_migration(self): - self.airtime1.refresh_from_db() - - self.assertIsNotNone(self.airtime1.uuid) diff --git a/temba/airtime/views.py b/temba/airtime/views.py index f3e69a23529..9dec1f64550 100644 --- a/temba/airtime/views.py +++ b/temba/airtime/views.py @@ -1,21 +1,19 @@ -from smartmin.views import SmartCRUDL, SmartListView, SmartReadView +from smartmin.views import SmartCRUDL -from django.db.models import Prefetch from django.urls import reverse from django.utils.translation import gettext_lazy as _ from temba.airtime.models import AirtimeTransfer from temba.contacts.models import URN, ContactURN -from temba.orgs.views import OrgObjPermsMixin, OrgPermsMixin -from temba.request_logs.models import HTTPLog -from temba.utils.views import SpaMixin +from temba.orgs.views.base import BaseListView, BaseReadView +from temba.utils.views.mixins import SpaMixin class AirtimeCRUDL(SmartCRUDL): model = AirtimeTransfer actions = ("list", "read") - class List(SpaMixin, OrgPermsMixin, SmartListView): + class List(SpaMixin, BaseListView): menu_path = "/settings/workspace" title = _("Recent Airtime Transfers") fields = ("status", "contact", "recipient", "currency", "actual_amount", "created_on") @@ -39,15 +37,12 @@ def lookup_field_link(self, context, field, obj): return 
super().lookup_field_link(context, field, obj)
 
-    def derive_queryset(self, **kwargs):
-        return AirtimeTransfer.objects.filter(org=self.derive_org())
-
     def get_context_data(self, **kwargs):
         context = super().get_context_data(**kwargs)
         context["org"] = self.derive_org()
         return context
 
-    class Read(SpaMixin, OrgObjPermsMixin, SmartReadView):
+    class Read(SpaMixin, BaseReadView):
         menu_path = "/settings/workspace"
         title = _("Airtime Transfer Details")
         fields = (
@@ -72,14 +67,13 @@ def get_recipient(self, obj):
             org = self.derive_org()
             return ContactURN.ANON_MASK_HTML if org.is_anon else URN.format(obj.recipient, international=True)
 
-        def derive_queryset(self, **kwargs):
-            logs_prefetch = Prefetch("http_logs", HTTPLog.objects.order_by("created_on", "id"))
-            return AirtimeTransfer.objects.filter(org=self.derive_org()).prefetch_related(logs_prefetch)
-
         def get_context_data(self, **kwargs):
             org = self.derive_org()
             user = self.request.user
 
             context = super().get_context_data(**kwargs)
+            context["show_logs"] = not org.is_anon or user.is_staff
+            context["http_logs"] = self.get_object().http_logs.order_by("created_on", "id")
+
             return context
diff --git a/temba/api/internal/serializers.py b/temba/api/internal/serializers.py
index 840cef83650..b518f46fdf4 100644
--- a/temba/api/internal/serializers.py
+++ b/temba/api/internal/serializers.py
@@ -4,6 +4,7 @@
 
 from temba.locations.models import AdminBoundary
 from temba.templates.models import Template, TemplateTranslation
+from temba.tickets.models import Shortcut
 
 
 class ModelAsJsonSerializer(serializers.BaseSerializer):
@@ -17,6 +18,14 @@ class Meta:
         fields = ("osm_id", "name", "path")
 
 
+class ShortcutReadSerializer(serializers.ModelSerializer):
+    modified_on = serializers.DateTimeField(default_timezone=tzone.utc)
+
+    class Meta:
+        model = Shortcut
+        fields = ("uuid", "name", "text", "modified_on")
+
+
 class TemplateReadSerializer(serializers.ModelSerializer):
     STATUSES = {
         TemplateTranslation.STATUS_PENDING: "pending",
diff --git a/temba/api/internal/tests.py b/temba/api/internal/tests.py
index bfdd9f772b7..564ed9158bf 100644
--- a/temba/api/internal/tests.py
+++ b/temba/api/internal/tests.py
@@ -1,12 +1,12 @@
 from django.urls import reverse
 from django.utils import timezone
 
-from temba.api.tests import APITestMixin
+from temba.api.tests.mixins import APITestMixin
 from temba.contacts.models import ContactExport
 from temba.notifications.types import ExportFinishedNotificationType
 from temba.templates.models import TemplateTranslation
 from temba.tests import TembaTest, matchers
-from temba.tickets.models import TicketExport
+from temba.tickets.models import Shortcut, TicketExport
 
 NUM_BASE_QUERIES = 4  # number of queries required for any request (internal API is session only)
 
@@ -122,6 +122,37 @@ def test_notifications(self):
         self.assertEqual(2, self.admin.notifications.filter(is_seen=True).count())
         self.assertEqual(1, self.editor.notifications.filter(is_seen=False).count())
 
+    def test_shortcuts(self):
+        endpoint_url = reverse("api.internal.shortcuts") + ".json"
+
+        self.assertGetNotPermitted(endpoint_url, [None])
+        self.assertPostNotAllowed(endpoint_url)
+        self.assertDeleteNotAllowed(endpoint_url)
+
+        shortcut1 = Shortcut.create(self.org, self.admin, "Planes", "Planes are...")
+        shortcut2 = Shortcut.create(self.org, self.admin, "Trains", "Trains are...")
+        Shortcut.create(self.org2, self.admin, "Cars", "Other org")
+
+        self.assertGet(
+            endpoint_url,
+            [self.admin],
+            results=[
+                {
+                    "uuid": str(shortcut2.uuid),
+                    "name": "Trains",
+                    "text": "Trains are...",
+                    "modified_on": matchers.ISODate(),
+                },
+                {
+                    "uuid": str(shortcut1.uuid),
+                    "name": "Planes",
+                    "text": "Planes are...",
"modified_on": matchers.ISODate(), + }, + { + "uuid": str(shortcut1.uuid), + "name": "Planes", + "text": "Planes are...", + "modified_on": matchers.ISODate(), + }, + ], + num_queries=NUM_BASE_QUERIES + 1, + ) + def test_templates(self): endpoint_url = reverse("api.internal.templates") + ".json" diff --git a/temba/api/internal/urls.py b/temba/api/internal/urls.py index 4209cdb6947..3d8fd567726 100644 --- a/temba/api/internal/urls.py +++ b/temba/api/internal/urls.py @@ -2,12 +2,13 @@ from django.urls import re_path -from .views import LocationsEndpoint, NotificationsEndpoint, TemplatesEndpoint +from .views import LocationsEndpoint, NotificationsEndpoint, ShortcutsEndpoint, TemplatesEndpoint urlpatterns = [ # ========== endpoints A-Z =========== re_path(r"^locations$", LocationsEndpoint.as_view(), name="api.internal.locations"), re_path(r"^notifications$", NotificationsEndpoint.as_view(), name="api.internal.notifications"), + re_path(r"^shortcuts$", ShortcutsEndpoint.as_view(), name="api.internal.shortcuts"), re_path(r"^templates$", TemplatesEndpoint.as_view(), name="api.internal.templates"), ] diff --git a/temba/api/internal/views.py b/temba/api/internal/views.py index d2c81c9b585..892cd4f6cbe 100644 --- a/temba/api/internal/views.py +++ b/temba/api/internal/views.py @@ -8,6 +8,7 @@ from temba.locations.models import AdminBoundary from temba.notifications.models import Notification from temba.templates.models import Template, TemplateTranslation +from temba.tickets.models import Shortcut from ..models import APIPermission, SSLPermission from ..support import APISessionAuthentication, CreatedOnCursorPagination, ModifiedOnCursorPagination @@ -85,6 +86,12 @@ def delete(self, request, *args, **kwargs): return Response(status=status.HTTP_204_NO_CONTENT) +class ShortcutsEndpoint(ListAPIMixin, BaseEndpoint): + model = Shortcut + serializer_class = serializers.ShortcutReadSerializer + pagination_class = ModifiedOnCursorPagination + + class TemplatesEndpoint(ListAPIMixin, BaseEndpoint): """ WhatsApp templates with their translations. 
diff --git a/temba/api/migrations/0046_alter_apitoken_role.py b/temba/api/migrations/0046_alter_apitoken_role.py
index b65815f00f1..5355b47d71a 100644
--- a/temba/api/migrations/0046_alter_apitoken_role.py
+++ b/temba/api/migrations/0046_alter_apitoken_role.py
@@ -9,6 +9,7 @@ class Migration(migrations.Migration):
     dependencies = [
         ("api", "0045_apitoken_last_used_on"),
         ("auth", "0012_alter_user_first_name_max_length"),
+        ("orgs", "0150_backfill_org_prometheus_token"),
     ]
 
     operations = [
diff --git a/temba/api/migrations/0047_remove_apitoken_role.py b/temba/api/migrations/0047_remove_apitoken_role.py
new file mode 100644
index 00000000000..0b269c02d21
--- /dev/null
+++ b/temba/api/migrations/0047_remove_apitoken_role.py
@@ -0,0 +1,17 @@
+# Generated by Django 5.1 on 2024-08-19 19:21
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("api", "0046_alter_apitoken_role"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="apitoken",
+            name="role",
+        ),
+    ]
diff --git a/temba/api/models.py b/temba/api/models.py
index 7de74ca81a1..0743adcb9d5 100644
--- a/temba/api/models.py
+++ b/temba/api/models.py
@@ -5,7 +5,6 @@
 from smartmin.models import SmartModel
 
 from django.conf import settings
-from django.contrib.auth.models import Group
 from django.db import models
 from django.utils import timezone
 from django.utils.translation import gettext_lazy as _
@@ -73,9 +72,9 @@ def has_permission(self, request, view):
 
         if request.auth:  # auth token was used
-            role = org.get_user_role(request.auth.user)
+            role = org.get_user_role(request.user)
 
-            # only editors and administrators can use API tokens
+            # only editors, administrators can use API tokens
             if role not in APIToken.ALLOWED_ROLES:
                 return False
         elif org:
@@ -83,13 +82,15 @@
         else:
             return False
 
-        has_perm = role.has_api_perm(permission)
+        has_perm = request.user.is_staff or role.has_api_perm(permission)
+        if not has_perm:
+            return False
 
-        # viewers can only ever get from the API
-        if role == OrgRole.VIEWER:
-            return has_perm and request.method == "GET"
+        # viewers and servicing staff can only ever GET from the API
+        if role == OrgRole.VIEWER or (not role and request.user.is_staff):
+            return request.method == "GET"
 
-        return has_perm
+        return True
 
 
 class SSLPermission(BasePermission):  # pragma: no cover
@@ -215,9 +216,6 @@ class APIToken(models.Model):
     last_used_on = models.DateTimeField(null=True)
     is_active = models.BooleanField(default=True)
 
-    # TODO remove
-    role = models.ForeignKey(Group, on_delete=models.PROTECT, null=True)
-
     @classmethod
     def create(cls, org, user):
         """
diff --git a/temba/api/support.py b/temba/api/support.py
index cc66e5928fa..46f0b4fa17f 100644
--- a/temba/api/support.py
+++ b/temba/api/support.py
@@ -125,10 +125,6 @@ def get_cache_key(self, request, view):
         if user.is_authenticated:
             ident = f"{org.id if org else 0}"  # scope to org
 
-            # but staff users get their own scope within the org
-            if user.is_staff:
-                ident += f"-{user.id}"
-
         return self.cache_format % {"scope": self.scope, "ident": ident or self.get_ident(request)}
diff --git a/temba/api/tasks.py b/temba/api/tasks.py
index d3f4045c54e..15ec0b3c960 100644
--- a/temba/api/tasks.py
+++ b/temba/api/tasks.py
@@ -1,8 +1,9 @@
+import itertools
+
 from django.conf import settings
 from django.utils import timezone
 
 from temba.api.models import APIToken
-from temba.utils import chunk_list
 from temba.utils.crons import cron_task
 
 from .models import WebHookEvent
@@ -33,7 +34,7 @@ def
trim_webhook_events(): if settings.RETENTION_PERIODS["webhookevent"]: trim_before = timezone.now() - settings.RETENTION_PERIODS["webhookevent"] event_ids = WebHookEvent.objects.filter(created_on__lte=trim_before).values_list("id", flat=True) - for batch in chunk_list(event_ids, 1000): + for batch in itertools.batched(event_ids, 1000): num_deleted, _ = WebHookEvent.objects.filter(id__in=batch).delete() return {"deleted": num_deleted} diff --git a/temba/contacts/management/commands/__init__.py b/temba/api/tests/__init__.py similarity index 100% rename from temba/contacts/management/commands/__init__.py rename to temba/api/tests/__init__.py diff --git a/temba/api/tests.py b/temba/api/tests/mixins.py similarity index 65% rename from temba/api/tests.py rename to temba/api/tests/mixins.py index bdd7d7c6b4b..4d3c13316db 100644 --- a/temba/api/tests.py +++ b/temba/api/tests/mixins.py @@ -1,99 +1,6 @@ -from datetime import timedelta - -from django.contrib.auth.models import Group from django.db import connection -from django.test import override_settings -from django.urls import reverse -from django.utils import timezone - -from temba.orgs.models import OrgRole -from temba.tests import CRUDLTestMixin, TembaTest - -from .models import APIToken, Resthook, WebHookEvent -from .tasks import trim_webhook_events, update_tokens_used - - -class APITokenTest(TembaTest): - def setUp(self): - super().setUp() - - self.admins_group = Group.objects.get(name="Administrators") - self.editors_group = Group.objects.get(name="Editors") - - self.org2.add_user(self.admin, OrgRole.EDITOR) # our admin can act as editor for other org - - def test_create(self): - token1 = APIToken.create(self.org, self.admin) - self.assertEqual(self.org, token1.org) - self.assertEqual(self.admin, token1.user) - self.assertTrue(token1.key) - self.assertEqual(str(token1), token1.key) - - # can create another token for same user - token2 = APIToken.create(self.org, self.admin) - self.assertNotEqual(token1, token2) - self.assertNotEqual(token1.key, token2.key) - - # can't create tokens for viewer or agent users - self.assertRaises(AssertionError, APIToken.create, self.org, self.agent) - self.assertRaises(AssertionError, APIToken.create, self.org, self.user) - - def test_record_used(self): - token1 = APIToken.create(self.org, self.admin) - token2 = APIToken.create(self.org2, self.admin2) - - token1.record_used() - - update_tokens_used() - - token1.refresh_from_db() - token2.refresh_from_db() - - self.assertIsNotNone(token1.last_used_on) - self.assertIsNone(token2.last_used_on) - - -class APITokenCRUDLTest(CRUDLTestMixin, TembaTest): - def test_delete(self): - token1 = APIToken.create(self.org, self.admin) - token2 = APIToken.create(self.org, self.editor) - - delete_url = reverse("api.apitoken_delete", args=[token1.key]) - - self.assertRequestDisallowed(delete_url, [self.editor, self.admin2]) - - response = self.assertDeleteFetch(delete_url, [self.admin], as_modal=True) - self.assertContains(response, f"You are about to delete the API token {token1.key[:6]}…") - - response = self.assertDeleteSubmit(delete_url, self.admin, object_deactivated=token1) - self.assertRedirect(response, "/user/tokens/") - - token1.refresh_from_db() - token2.refresh_from_db() - - self.assertFalse(token1.is_active) - self.assertTrue(token2.is_active) - - -class WebHookTest(TembaTest): - def test_trim_events_and_results(self): - five_hours_ago = timezone.now() - timedelta(hours=5) - - # create some events - resthook = Resthook.get_or_create(org=self.org, 
slug="registration", user=self.admin) - WebHookEvent.objects.create(org=self.org, resthook=resthook, data={}, created_on=five_hours_ago) - - with override_settings(RETENTION_PERIODS={"webhookevent": None}): - trim_webhook_events() - self.assertTrue(WebHookEvent.objects.all()) - - with override_settings(RETENTION_PERIODS={"webhookevent": timedelta(hours=12)}): # older than our event - trim_webhook_events() - self.assertTrue(WebHookEvent.objects.all()) - with override_settings(RETENTION_PERIODS={"webhookevent": timedelta(hours=2)}): - trim_webhook_events() - self.assertFalse(WebHookEvent.objects.all()) +from temba.api.models import APIToken class APITestMixin: @@ -180,7 +87,7 @@ def as_user(user, expected_results: list, expected_queries: int = None): response = self._getJSON(endpoint_url, user, by_token=by_token, num_queries=expected_queries) if results is not None: - self.assertEqual(200, response.status_code) + self.assertEqual(200, response.status_code, f"status code mismatch for user {user}") actual_results = response.json()["results"] full_check = expected_results and isinstance(expected_results[0], dict) diff --git a/temba/api/tests/test_token.py b/temba/api/tests/test_token.py new file mode 100644 index 00000000000..72f9fcc06ad --- /dev/null +++ b/temba/api/tests/test_token.py @@ -0,0 +1,46 @@ +from django.contrib.auth.models import Group + +from temba.api.models import APIToken +from temba.api.tasks import update_tokens_used +from temba.orgs.models import OrgRole +from temba.tests import TembaTest + + +class APITokenTest(TembaTest): + def setUp(self): + super().setUp() + + self.admins_group = Group.objects.get(name="Administrators") + self.editors_group = Group.objects.get(name="Editors") + + self.org2.add_user(self.admin, OrgRole.EDITOR) # our admin can act as editor for other org + + def test_create(self): + token1 = APIToken.create(self.org, self.admin) + self.assertEqual(self.org, token1.org) + self.assertEqual(self.admin, token1.user) + self.assertTrue(token1.key) + self.assertEqual(str(token1), token1.key) + + # can create another token for same user + token2 = APIToken.create(self.org, self.admin) + self.assertNotEqual(token1, token2) + self.assertNotEqual(token1.key, token2.key) + + # can't create tokens for viewer or agent users + self.assertRaises(AssertionError, APIToken.create, self.org, self.agent) + self.assertRaises(AssertionError, APIToken.create, self.org, self.user) + + def test_record_used(self): + token1 = APIToken.create(self.org, self.admin) + token2 = APIToken.create(self.org2, self.admin2) + + token1.record_used() + + update_tokens_used() + + token1.refresh_from_db() + token2.refresh_from_db() + + self.assertIsNotNone(token1.last_used_on) + self.assertIsNone(token2.last_used_on) diff --git a/temba/api/tests/test_tokencrudl.py b/temba/api/tests/test_tokencrudl.py new file mode 100644 index 00000000000..040636652b0 --- /dev/null +++ b/temba/api/tests/test_tokencrudl.py @@ -0,0 +1,60 @@ +from django.urls import reverse + +from temba.api.models import APIToken +from temba.orgs.models import OrgRole +from temba.tests import CRUDLTestMixin, TembaTest + + +class APITokenCRUDLTest(CRUDLTestMixin, TembaTest): + def test_list(self): + tokens_url = reverse("api.apitoken_list") + + self.assertRequestDisallowed(tokens_url, [None, self.user, self.agent]) + self.assertListFetch(tokens_url, [self.admin], context_objects=[]) + + # add user to other org and create API tokens for both + self.org2.add_user(self.admin, OrgRole.EDITOR) + token1 = APIToken.create(self.org, 
self.admin) + token2 = APIToken.create(self.org, self.admin) + APIToken.create(self.org, self.editor) # other user + APIToken.create(self.org2, self.admin) # other org + + response = self.assertListFetch(tokens_url, [self.admin], context_objects=[token1, token2], choose_org=self.org) + self.assertContentMenu(tokens_url, self.admin, ["New"], choose_org=self.org) + + # can POST to create new token + response = self.client.post(tokens_url, {}) + self.assertRedirect(response, tokens_url) + self.assertEqual(3, self.admin.get_api_tokens(self.org).count()) + token3 = self.admin.get_api_tokens(self.org).order_by("created").last() + + # and now option to create new token is gone because we've reached the limit + response = self.assertListFetch( + tokens_url, [self.admin], context_objects=[token1, token2, token3], choose_org=self.org + ) + self.assertContentMenu(tokens_url, self.admin, [], choose_org=self.org) + + # and POSTing is noop + response = self.client.post(tokens_url, {}) + self.assertRedirect(response, tokens_url) + self.assertEqual(3, self.admin.get_api_tokens(self.org).count()) + + def test_delete(self): + token1 = APIToken.create(self.org, self.admin) + token2 = APIToken.create(self.org, self.editor) + + delete_url = reverse("api.apitoken_delete", args=[token1.key]) + + self.assertRequestDisallowed(delete_url, [self.editor, self.admin2]) + + response = self.assertDeleteFetch(delete_url, [self.admin], as_modal=True) + self.assertContains(response, f"You are about to delete the API token {token1.key[:6]}…") + + response = self.assertDeleteSubmit(delete_url, self.admin, object_deactivated=token1) + self.assertRedirect(response, "/apitoken/") + + token1.refresh_from_db() + token2.refresh_from_db() + + self.assertFalse(token1.is_active) + self.assertTrue(token2.is_active) diff --git a/temba/api/tests/test_webhook.py b/temba/api/tests/test_webhook.py new file mode 100644 index 00000000000..aa33e296f53 --- /dev/null +++ b/temba/api/tests/test_webhook.py @@ -0,0 +1,29 @@ +from datetime import timedelta + +from django.test import override_settings +from django.utils import timezone + +from temba.api.models import Resthook, WebHookEvent +from temba.api.tasks import trim_webhook_events +from temba.tests import TembaTest + + +class WebHookTest(TembaTest): + def test_trim_events_and_results(self): + five_hours_ago = timezone.now() - timedelta(hours=5) + + # create some events + resthook = Resthook.get_or_create(org=self.org, slug="registration", user=self.admin) + WebHookEvent.objects.create(org=self.org, resthook=resthook, data={}, created_on=five_hours_ago) + + with override_settings(RETENTION_PERIODS={"webhookevent": None}): + trim_webhook_events() + self.assertTrue(WebHookEvent.objects.all()) + + with override_settings(RETENTION_PERIODS={"webhookevent": timedelta(hours=12)}): # older than our event + trim_webhook_events() + self.assertTrue(WebHookEvent.objects.all()) + + with override_settings(RETENTION_PERIODS={"webhookevent": timedelta(hours=2)}): + trim_webhook_events() + self.assertFalse(WebHookEvent.objects.all()) diff --git a/temba/api/v2/fields.py b/temba/api/v2/fields.py index 9f61c5fa3a4..3d48ee80c7c 100644 --- a/temba/api/v2/fields.py +++ b/temba/api/v2/fields.py @@ -1,6 +1,5 @@ from rest_framework import fields, relations, serializers -from django.contrib.auth.models import User from django.db.models import Q from django.utils.translation import gettext_lazy as _ @@ -9,6 +8,7 @@ from temba.contacts.models import URN, Contact, ContactField as ContactFieldModel, ContactGroup, ContactURN 
from temba.flows.models import Flow from temba.msgs.models import Attachment, Label, Media, Msg +from temba.orgs.models import User from temba.tickets.models import Ticket, Topic from temba.utils import languages from temba.utils.uuid import find_uuid, is_uuid diff --git a/temba/api/v2/serializers.py b/temba/api/v2/serializers.py index fdff7667350..409bec730bf 100644 --- a/temba/api/v2/serializers.py +++ b/temba/api/v2/serializers.py @@ -9,7 +9,6 @@ from rest_framework import serializers from django.conf import settings -from django.contrib.auth.models import User from temba import mailroom from temba.archives.models import Archive @@ -22,7 +21,7 @@ from temba.locations.models import AdminBoundary from temba.mailroom import modifiers from temba.msgs.models import Broadcast, Label, Media, Msg, OptIn -from temba.orgs.models import Org, OrgRole +from temba.orgs.models import Org, OrgRole, User from temba.tickets.models import Ticket, Topic from temba.utils import json from temba.utils.fields import NameValidator @@ -165,19 +164,22 @@ class Meta: class BroadcastReadSerializer(ReadSerializer): STATUSES = { - "I": "queued", # may exist in older data + Broadcast.STATUS_PENDING: "pending", Broadcast.STATUS_QUEUED: "queued", - Broadcast.STATUS_SENT: "sent", + Broadcast.STATUS_STARTED: "started", + Broadcast.STATUS_COMPLETED: "completed", Broadcast.STATUS_FAILED: "failed", + Broadcast.STATUS_INTERRUPTED: "interrupted", } + status = serializers.SerializerMethodField() + progress = serializers.SerializerMethodField() urns = serializers.SerializerMethodField() contacts = fields.ContactField(many=True) groups = fields.ContactGroupField(many=True) text = serializers.SerializerMethodField() attachments = serializers.SerializerMethodField() base_language = fields.LanguageField() - status = serializers.SerializerMethodField() created_on = serializers.DateTimeField(default_timezone=tzone.utc) def get_text(self, obj): @@ -187,7 +189,10 @@ def get_attachments(self, obj): return {lang: trans.get("attachments", []) for lang, trans in obj.translations.items()} def get_status(self, obj): - return self.STATUSES.get(obj.status, "sent") + return self.STATUSES[obj.status] + + def get_progress(self, obj): + return {"total": obj.contact_count or -1, "started": obj.msg_count} def get_urns(self, obj): if self.context["org"].is_anon: @@ -197,7 +202,18 @@ def get_urns(self, obj): class Meta: model = Broadcast - fields = ("id", "urns", "contacts", "groups", "text", "attachments", "base_language", "status", "created_on") + fields = ( + "id", + "status", + "progress", + "urns", + "contacts", + "groups", + "text", + "attachments", + "base_language", + "created_on", + ) class BroadcastWriteSerializer(WriteSerializer): @@ -1034,21 +1050,16 @@ def get_path(self, obj): if not self.context["include_paths"]: return None - def convert_step(step): - arrived_on = iso8601.parse_date(step["arrived_on"]) - return {"node": step["node_uuid"], "time": format_datetime(arrived_on)} - - return [convert_step(s) for s in obj.path] + return [{"node": str(s.node), "time": format_datetime(s.time)} for s in obj.get_path()] def get_values(self, obj): def convert_result(result): return { + "name": result.get("name"), "value": result["value"], "category": result.get("category"), "node": result["node_uuid"], "time": format_datetime(iso8601.parse_date(result["created_on"])), - "input": result.get("input"), - "name": result.get("name"), } return {k: convert_result(r) for k, r in obj.results.items()} @@ -1077,13 +1088,16 @@ class Meta: class 
FlowStartReadSerializer(ReadSerializer): STATUSES = { FlowStart.STATUS_PENDING: "pending", - FlowStart.STATUS_STARTING: "starting", - FlowStart.STATUS_COMPLETE: "complete", + FlowStart.STATUS_QUEUED: "queued", + FlowStart.STATUS_STARTED: "started", + FlowStart.STATUS_COMPLETED: "completed", FlowStart.STATUS_FAILED: "failed", + FlowStart.STATUS_INTERRUPTED: "interrupted", } flow = fields.FlowField() status = serializers.SerializerMethodField() + progress = serializers.SerializerMethodField() groups = fields.ContactGroupField(many=True) contacts = fields.ContactField(many=True) params = serializers.JSONField(required=False) @@ -1098,6 +1112,9 @@ class FlowStartReadSerializer(ReadSerializer): def get_status(self, obj): return self.STATUSES.get(obj.status) + def get_progress(self, obj): + return {"total": obj.contact_count or -1, "started": obj.run_count} + def get_restart_participants(self, obj): return not (obj.exclusions and obj.exclusions.get(FlowStart.EXCLUSION_STARTED_PREVIOUSLY, False)) @@ -1107,15 +1124,17 @@ def get_exclude_active(self, obj): class Meta: model = FlowStart fields = ( - "id", "uuid", "flow", "status", + "progress", "groups", "contacts", "params", "created_on", "modified_on", + # deprecated + "id", "extra", "restart_participants", "exclude_active", @@ -1715,6 +1734,7 @@ class UserReadSerializer(ReadSerializer): avatar = serializers.SerializerMethodField() role = serializers.SerializerMethodField() + team = serializers.SerializerMethodField() created_on = serializers.DateTimeField(default_timezone=tzone.utc, source="date_joined") def get_avatar(self, obj): @@ -1722,12 +1742,15 @@ def get_avatar(self, obj): return settings.avatar.url if settings and settings.avatar else None def get_role(self, obj): - role = self.context["user_roles"][obj] - return self.ROLES[role] + return self.ROLES[self.context["memberships"][obj].role] + + def get_team(self, obj): + team = self.context["memberships"][obj].team + return {"uuid": str(team.uuid), "name": team.name} if team else None class Meta: model = User - fields = ("email", "first_name", "last_name", "role", "created_on", "avatar") + fields = ("email", "first_name", "last_name", "role", "team", "created_on", "avatar") class WorkspaceReadSerializer(ReadSerializer): diff --git a/temba/api/v2/tests.py b/temba/api/v2/tests.py deleted file mode 100644 index 7adaf2e4d9c..00000000000 --- a/temba/api/v2/tests.py +++ /dev/null @@ -1,5304 +0,0 @@ -import base64 -import time -from collections import OrderedDict -from datetime import datetime, timezone as tzone -from decimal import Decimal -from unittest.mock import call, patch -from urllib.parse import quote_plus - -import iso8601 -from rest_framework import serializers - -from django.conf import settings -from django.contrib.gis.geos import GEOSGeometry -from django.core.cache import cache -from django.test import override_settings -from django.urls import reverse -from django.utils import timezone - -from temba.api.models import APIToken, Resthook, WebHookEvent -from temba.archives.models import Archive -from temba.campaigns.models import Campaign, CampaignEvent -from temba.channels.models import ChannelEvent -from temba.classifiers.models import Classifier -from temba.classifiers.types.luis import LuisType -from temba.classifiers.types.wit import WitType -from temba.contacts.models import Contact, ContactField, ContactGroup, ContactURN -from temba.flows.models import Flow, FlowLabel, FlowRun, FlowStart -from temba.globals.models import Global -from temba.locations.models import BoundaryAlias 
-from temba.msgs.models import Broadcast, Label, Media, Msg, OptIn -from temba.orgs.models import Org, OrgRole -from temba.schedules.models import Schedule -from temba.tests import TembaTest, matchers, mock_mailroom, mock_uuids -from temba.tests.engine import MockSessionWriter -from temba.tickets.models import Topic -from temba.triggers.models import Trigger - -from ..tests import APITestMixin -from . import fields -from .serializers import format_datetime, normalize_extra - -NUM_BASE_SESSION_QUERIES = 4 # number of queries required for any request using session auth -NUM_BASE_TOKEN_QUERIES = 2 # number of queries required for any request using token auth - - -class APITest(APITestMixin, TembaTest): - def upload_media(self, user, filename: str): - self.login(user) - - with open(filename, "rb") as data: - response = self.client.post( - reverse("api.v2.media") + ".json", {"file": data}, HTTP_X_FORWARDED_HTTPS="https" - ) - self.assertEqual(201, response.status_code) - - return Media.objects.get(uuid=response.json()["uuid"]) - - -class FieldsTest(APITest): - def assert_field(self, f, *, submissions: dict, representations: dict): - f._context = {"org": self.org} # noqa - - for submitted, expected in submissions.items(): - if isinstance(expected, type) and issubclass(expected, Exception): - with self.assertRaises(expected, msg=f"expected exception for '{submitted}'"): - f.run_validation(submitted) - else: - self.assertEqual(f.run_validation(submitted), expected, f"to_internal_value mismatch for '{submitted}'") - - for value, expected in representations.items(): - self.assertEqual(f.to_representation(value), expected, f"to_representation mismatch for '{value}'") - - def test_contact(self): - joe = self.create_contact("Joe", urns=["tel:+593999123456"]) - frank = self.create_contact("Frank", urns=["twitterid:2352463463#franky"]) # urn has display fragment - voldemort = self.create_contact("", urns=[]) # no name or URNs - - self.assert_field( - fields.ContactField(source="test"), - submissions={ - joe.uuid: joe, # by UUID - joe.get_urn().urn: joe, # by URN - 0: serializers.ValidationError, - (joe.uuid, frank.uuid): serializers.ValidationError, - }, - representations={ - joe: {"uuid": str(joe.uuid), "name": "Joe"}, - }, - ) - - self.assert_field( - fields.ContactField(source="test", as_summary=True), - submissions={ - joe.uuid: joe, # by UUID - joe.get_urn().urn: joe, # by URN - 0: serializers.ValidationError, - (joe.uuid, frank.uuid): serializers.ValidationError, - }, - representations={ - joe: { - "uuid": str(joe.uuid), - "name": "Joe", - "urn": "tel:+593999123456", - "urn_display": "099 912 3456", - }, - frank: { - "uuid": str(frank.uuid), - "name": "Frank", - "urn": "twitterid:2352463463", - "urn_display": "franky", - }, - voldemort: { - "uuid": str(voldemort.uuid), - "name": "", - "urn": None, - "urn_display": None, - }, - }, - ) - - self.assert_field( - fields.ContactField(source="test", many=True), - submissions={ - (joe.uuid, frank.uuid): [joe, frank], - joe.uuid: serializers.ValidationError, - }, - representations={ - (joe, frank): [ - {"uuid": str(joe.uuid), "name": "Joe"}, - {"uuid": str(frank.uuid), "name": "Frank"}, - ] - }, - ) - - with self.anonymous(self.org): - # load contacts again without cached org on them or their urns - joe = Contact.objects.get(id=joe.id) - frank = Contact.objects.get(id=frank.id) - voldemort = Contact.objects.get(id=voldemort.id) - - self.assert_field( - fields.ContactField(source="test"), - submissions={ - joe.uuid: joe, # by UUID - joe.get_urn().urn: joe, # 
by URN - 0: serializers.ValidationError, - (joe.uuid, frank.uuid): serializers.ValidationError, - }, - representations={ - joe: {"uuid": str(joe.uuid), "name": "Joe"}, - frank: {"uuid": str(frank.uuid), "name": "Frank"}, - voldemort: {"uuid": str(voldemort.uuid), "name": ""}, - }, - ) - - self.assert_field( - fields.ContactField(source="test", as_summary=True), - submissions={ - joe.uuid: joe, # by UUID - joe.get_urn().urn: joe, # by URN - 0: serializers.ValidationError, - (joe.uuid, frank.uuid): serializers.ValidationError, - }, - representations={ - joe: { - "uuid": str(joe.uuid), - "name": "Joe", - "urn": "tel:********", - "urn_display": None, - "anon_display": f"{joe.id:010}", - }, - frank: { - "uuid": str(frank.uuid), - "name": "Frank", - "urn": "twitterid:********", - "urn_display": None, - "anon_display": f"{frank.id:010}", - }, - voldemort: { - "uuid": str(voldemort.uuid), - "name": "", - "urn": None, - "urn_display": None, - "anon_display": f"{voldemort.id:010}", - }, - }, - ) - - def test_language_and_translations(self): - self.assert_field( - fields.LanguageField(source="test"), - submissions={ - "eng": "eng", - "kin": "kin", - 123: serializers.ValidationError, - "base": serializers.ValidationError, - }, - representations={"eng": "eng"}, - ) - - field = fields.LimitedDictField(source="test", max_length=2) - self.assertEqual({"foo": "bar", "zed": 123}, field.run_validation({"foo": "bar", "zed": 123})) - self.assertRaises(serializers.ValidationError, field.run_validation, {"1": 1, "2": 2, "3": 3}) - - field = fields.LanguageDictField(source="test") - self.assertEqual(field.run_validation({"eng": "Hello"}), {"eng": "Hello"}) - self.assertRaises(serializers.ValidationError, field.run_validation, {"base": ""}) - - field = fields.TranslatedTextField(source="test", max_length=10) - field._context = {"org": self.org} - - self.assertEqual(field.run_validation("Hello"), {"eng": "Hello"}) - self.assertEqual(field.run_validation({"eng": "Hello"}), {"eng": "Hello"}) - self.assertEqual(field.run_validation({"eng": "Hello", "spa": "Hola"}), {"eng": "Hello", "spa": "Hola"}) - self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": ""}) # empty - self.assertRaises(serializers.ValidationError, field.run_validation, "") # empty - self.assertRaises(serializers.ValidationError, field.run_validation, " ") # blank - self.assertRaises(serializers.ValidationError, field.run_validation, 123) # not a string or dict - self.assertRaises(serializers.ValidationError, field.run_validation, {}) # no translations - self.assertRaises(serializers.ValidationError, field.run_validation, {123: "Hello"}) # lang not a str - self.assertRaises(serializers.ValidationError, field.run_validation, {"base": "Hello"}) # lang not valid - self.assertRaises(serializers.ValidationError, field.run_validation, "HelloHello1") # translation too long - self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": "HelloHello1"}) - - media1 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") - media2 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/snow.mp4") - - field = fields.TranslatedAttachmentsField(source="test") - field._context = {"org": self.org} - - self.assertEqual(field.run_validation([f"image/jpeg:{media1.url}"]), {"eng": [media1]}) - self.assertEqual(field.run_validation({"eng": [str(media1.uuid)]}), {"eng": [media1]}) - self.assertEqual( - field.run_validation({"eng": [str(media1.uuid), str(media2.uuid)], "spa": [str(media1.uuid)]}), - 
{"eng": [media1, media2], "spa": [media1]}, - ) - self.assertRaises(serializers.ValidationError, field.run_validation, {}) # empty - self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": [""]}) # empty - self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": [" "]}) # blank - self.assertRaises(serializers.ValidationError, field.run_validation, {"base": ["Hello"]}) # lang not valid - self.assertRaises( - serializers.ValidationError, field.run_validation, {"eng": ["Hello"]} - ) # translation not valid attachment - self.assertRaises( - serializers.ValidationError, field.run_validation, {"kin": f"image/jpeg:{media1.url}"} - ) # translation not a list - self.assertRaises( - serializers.ValidationError, field.run_validation, {"eng": [f"image/jpeg:{media1.url}"] * 11} - ) # too many - - # check that default language is based on first flow language - self.org.flow_languages = ["spa", "kin"] - self.org.save(update_fields=("flow_languages",)) - - self.assertEqual(field.to_internal_value([str(media1.uuid)]), {"spa": [media1]}) - - def test_others(self): - group = self.create_group("Customers") - field_obj = self.create_field("registered", "Registered On", value_type=ContactField.TYPE_DATETIME) - flow = self.create_flow("Test") - campaign = Campaign.create(self.org, self.admin, "Reminders #1", group) - event = CampaignEvent.create_flow_event( - self.org, self.admin, campaign, field_obj, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 - ) - media = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") - - field = fields.CampaignField(source="test") - field._context = {"org": self.org} - - self.assertEqual(field.to_internal_value(str(campaign.uuid)), campaign) - self.assertRaises(serializers.ValidationError, field.to_internal_value, {"id": 3}) # not a string or int - - field = fields.CampaignEventField(source="test") - field._context = {"org": self.org} - - self.assertEqual(field.to_internal_value(str(event.uuid)), event) - - field._context = {"org": self.org2} - - self.assertRaises(serializers.ValidationError, field.to_internal_value, event.uuid) - - deleted_channel = self.create_channel("A", "My Android", "123456") - deleted_channel.is_active = False - deleted_channel.save(update_fields=("is_active",)) - - self.assert_field( - fields.ChannelField(source="test"), - submissions={self.channel.uuid: self.channel, deleted_channel.uuid: serializers.ValidationError}, - representations={self.channel: {"uuid": str(self.channel.uuid), "name": "Test Channel"}}, - ) - - self.assert_field( - fields.ContactGroupField(source="test"), - submissions={group.uuid: group}, - representations={group: {"uuid": str(group.uuid), "name": "Customers"}}, - ) - - field_created_on = self.org.fields.get(key="created_on") - - self.assert_field( - fields.ContactFieldField(source="test"), - submissions={"registered": field_obj, "created_on": field_created_on, "xyz": serializers.ValidationError}, - representations={field_obj: {"key": "registered", "name": "Registered On", "label": "Registered On"}}, - ) - - self.assert_field( - fields.FlowField(source="test"), - submissions={flow.uuid: flow}, - representations={flow: {"uuid": str(flow.uuid), "name": flow.name}}, - ) - - self.assert_field( - fields.MediaField(source="test"), - submissions={str(media.uuid): media, "xyz": serializers.ValidationError}, - representations={media: str(media.uuid)}, - ) - - self.assert_field( - fields.TopicField(source="test"), - submissions={str(self.org.default_ticket_topic.uuid): 
self.org.default_ticket_topic}, - representations={ - self.org.default_ticket_topic: {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"} - }, - ) - - self.assert_field( - fields.URNField(source="test"), - submissions={ - "tel:+1-800-123-4567": "tel:+18001234567", - "tel:0788 123 123": "tel:+250788123123", # using org country - "tel:(078) 812-3123": "tel:+250788123123", - "12345": serializers.ValidationError, # un-parseable - "tel:800-123-4567": serializers.ValidationError, # no country code - f"external:{'1' * 256}": serializers.ValidationError, # too long - 18_001_234_567: serializers.ValidationError, # non-string - }, - representations={"tel:+18001234567": "tel:+18001234567"}, - ) - - self.editor.is_active = False - self.editor.save(update_fields=("is_active",)) - - self.assert_field( - fields.UserField(source="test"), - submissions={ - "VIEWER@NYARUKA.COM": self.user, - "admin@nyaruka.com": self.admin, - self.editor.email: serializers.ValidationError, # deleted - self.admin2.email: serializers.ValidationError, # not in org - }, - representations={ - self.user: {"email": "viewer@nyaruka.com", "name": ""}, - self.editor: {"email": "editor@nyaruka.com", "name": "Ed McEdits"}, - }, - ) - self.assert_field( - fields.UserField(source="test", assignable_only=True), - submissions={ - self.user.email: serializers.ValidationError, # not assignable - self.admin.email: self.admin, - self.agent.email: self.agent, - }, - representations={self.agent: {"email": "agent@nyaruka.com", "name": "Agnes"}}, - ) - - def test_serialize_urn(self): - urn_obj = ContactURN.objects.create( - org=self.org, scheme="tel", path="+250788383383", identity="tel:+250788383383", priority=50, display="xyz" - ) - urn_dict = { - "channel": {"name": "Twilio", "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8"}, - "scheme": "tel", - "path": "+250788383383", - "display": "xyz", - } - - self.assertEqual("tel:+250788383383", fields.serialize_urn(self.org, urn_obj)) - self.assertEqual(urn_dict, fields.serialize_urn(self.org, urn_dict)) - - with self.anonymous(self.org): - self.assertEqual("tel:********", fields.serialize_urn(self.org, urn_obj)) - self.assertEqual( - { - "channel": {"name": "Twilio", "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8"}, - "scheme": "tel", - "path": "********", - "display": "xyz", - }, - fields.serialize_urn(self.org, urn_dict), - ) - - -class EndpointsTest(APITest): - def setUp(self): - super().setUp() - - self.joe = self.create_contact("Joe Blow", phone="+250788123123") - self.frank = self.create_contact("Frank", urns=["twitter:franky"]) - - self.twitter = self.create_channel("TWT", "Twitter Channel", "billy_bob") - - self.hans = self.create_contact("Hans Gruber", phone="+4921551511", org=self.org2) - - self.org2channel = self.create_channel("A", "Org2Channel", "123456", country="RW", org=self.org2) - - def assertResultsById(self, response, expected): - self.assertEqual(response.status_code, 200) - self.assertEqual([r["id"] for r in response.json()["results"]], [o.pk for o in expected]) - - def assertResultsByUUID(self, response, expected): - self.assertEqual(response.status_code, 200) - self.assertEqual([r["uuid"] for r in response.json()["results"]], [str(o.uuid) for o in expected]) - - def assert404(self, response): - self.assertEqual(response.status_code, 404) - self.assertEqual(response.json(), {"detail": "Not found."}) - - @override_settings(REST_HANDLE_EXCEPTIONS=True) - @patch("temba.api.v2.views.FieldsEndpoint.get_queryset") - def test_error_handling(self, mock_get_queryset): - 
mock_get_queryset.side_effect = ValueError("DOH!") - - self.login(self.admin) - - response = self.client.get( - reverse("api.v2.fields") + ".json", content_type="application/json", HTTP_X_FORWARDED_HTTPS="https" - ) - self.assertContains(response, "Server Error. Site administrators have been notified.", status_code=500) - - @override_settings(FLOW_START_PARAMS_SIZE=4) - def test_normalize_extra(self): - self.assertEqual(OrderedDict(), normalize_extra({})) - self.assertEqual( - OrderedDict([("0", "a"), ("1", True), ("2", Decimal("1.0")), ("3", "")]), - normalize_extra(["a", True, Decimal("1.0"), None]), - ) - self.assertEqual(OrderedDict([("_3__x", "z")]), normalize_extra({"%3 !x": "z"})) - self.assertEqual( - OrderedDict([("0", "a"), ("1", "b"), ("2", "c"), ("3", "d")]), normalize_extra(["a", "b", "c", "d", "e"]) - ) - self.assertEqual( - OrderedDict([("a", 1), ("b", 2), ("c", 3), ("d", 4)]), - normalize_extra({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), - ) - self.assertEqual(OrderedDict([("a", "x" * 640)]), normalize_extra({"a": "x" * 641})) - - def test_authentication(self): - def request(endpoint, **headers): - return self.client.get( - f"{endpoint}.json", content_type="application/json", HTTP_X_FORWARDED_HTTPS="https", **headers - ) - - def request_by_token(endpoint, token): - return request(endpoint, HTTP_AUTHORIZATION=f"Token {token}") - - def request_by_basic_auth(endpoint, username, token): - credentials_base64 = base64.b64encode(f"{username}:{token}".encode()).decode() - return request(endpoint, HTTP_AUTHORIZATION=f"Basic {credentials_base64}") - - def request_by_session(endpoint, user): - self.login(user) - resp = request(endpoint) - self.client.logout() - return resp - - contacts_url = reverse("api.v2.contacts") - campaigns_url = reverse("api.v2.campaigns") - fields_url = reverse("api.v2.fields") - - token1 = APIToken.create(self.org, self.admin) - token2 = APIToken.create(self.org, self.editor) - token3 = APIToken.create(self.org, self.customer_support) - - # can request fields endpoint using all 3 methods - response = request_by_token(fields_url, token1.key) - self.assertEqual(200, response.status_code) - response = request_by_basic_auth(fields_url, self.admin.username, token1.key) - self.assertEqual(200, response.status_code) - response = request_by_session(fields_url, self.admin) - self.assertEqual(200, response.status_code) - - # can't fetch endpoint with invalid token - response = request_by_token(contacts_url, "1234567890") - self.assertResponseError(response, None, "Invalid token", status_code=403) - - # can't fetch endpoint with invalid token - response = request_by_basic_auth(contacts_url, self.admin.username, "1234567890") - self.assertResponseError(response, None, "Invalid token or email", status_code=403) - - # can't fetch endpoint with invalid username - response = request_by_basic_auth(contacts_url, "some@name.com", token1.key) - self.assertResponseError(response, None, "Invalid token or email", status_code=403) - - # can fetch campaigns endpoint with valid admin token - response = request_by_token(campaigns_url, token1.key) - self.assertEqual(200, response.status_code) - self.assertEqual(str(self.org.id), response["X-Temba-Org"]) - - response = request_by_basic_auth(contacts_url, self.editor.username, token2.key) - self.assertEqual(200, response.status_code) - self.assertEqual(str(self.org.id), response["X-Temba-Org"]) - - # simulate the admin user exceeding the rate limit for the v2 scope - cache.set(f"throttle_v2_{self.org.id}", [time.time() for r in 
range(10000)]) - - # next request they make using a token will be rejected - response = request_by_token(fields_url, token1.key) - self.assertEqual(response.status_code, 429) - - # same with basic auth - response = request_by_basic_auth(fields_url, self.admin.username, token1.key) - self.assertEqual(response.status_code, 429) - - # or if another user in same org makes a request - response = request_by_token(fields_url, token2.key) - self.assertEqual(response.status_code, 429) - - # but they can still make a request if they have a session - response = request_by_session(fields_url, self.admin) - self.assertEqual(response.status_code, 200) - - # or if they're a staff user because they are user-scoped - response = request_by_token(fields_url, token3.key) - self.assertEqual(response.status_code, 200) - - # are allowed to access if we have not reached the configured org api rates - self.org.api_rates = {"v2": "15000/hour"} - self.org.save(update_fields=("api_rates",)) - - response = request_by_basic_auth(fields_url, self.admin.username, token1.key) - self.assertEqual(response.status_code, 200) - - cache.set(f"throttle_v2_{self.org.id}", [time.time() for r in range(15000)]) - - # next request they make using a token will be rejected - response = request_by_token(fields_url, token1.key) - self.assertEqual(response.status_code, 429) - - # if user is demoted to a role that can't use tokens, tokens shouldn't work for them - self.org.add_user(self.admin, OrgRole.VIEWER) - - self.assertEqual(request_by_token(campaigns_url, token1.key).status_code, 403) - self.assertEqual(request_by_basic_auth(campaigns_url, self.admin.username, token1.key).status_code, 403) - - # and if user is inactive, disallow the request - self.org.add_user(self.admin, OrgRole.ADMINISTRATOR) - self.admin.is_active = False - self.admin.save() - - response = request_by_token(contacts_url, token1.key) - self.assertResponseError(response, None, "Invalid token", status_code=403) - - response = request_by_basic_auth(contacts_url, self.admin.username, token1.key) - self.assertResponseError(response, None, "Invalid token or email", status_code=403) - - @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_HTTPS", "https")) - def test_root(self): - root_url = reverse("api.v2.root") - - # browse as HTML anonymously (should still show docs) - response = self.client.get(root_url) - self.assertContains(response, "We provide a RESTful JSON API") - - # POSTing just returns the docs with a 405 - response = self.client.post(root_url, {}) - self.assertContains(response, "We provide a RESTful JSON API", status_code=405) - - # same thing if user navigates to just /api - response = self.client.get(reverse("api"), follow=True) - self.assertContains(response, "We provide a RESTful JSON API") - - # try to browse as JSON anonymously - response = self.client.get(root_url + ".json") - self.assertEqual(200, response.status_code) - self.assertIsInstance(response.json(), dict) - self.assertEqual(response.json()["runs"], "http://testserver/api/v2/runs") # endpoints are listed - - def test_docs(self): - messages_url = reverse("api.v2.messages") - - # test fetching docs anonymously - response = self.client.get(messages_url) - self.assertContains(response, "This endpoint allows you to list messages in your account.") - - # you can also post to docs endpoints tho it just returns the docs with a 403 - response = self.client.post(messages_url, {}) - self.assertContains(response, "This endpoint allows you to list messages in your account.", status_code=403) - 
- # test fetching docs logged in - self.login(self.editor) - response = self.client.get(messages_url) - self.assertContains(response, "This endpoint allows you to list messages in your account.") - - def test_explorer(self): - explorer_url = reverse("api.v2.explorer") - - response = self.client.get(explorer_url) - self.assertLoginRedirect(response) - - # viewers can't access - self.login(self.user) - response = self.client.get(explorer_url) - self.assertLoginRedirect(response) - - # editors and administrators can - self.login(self.editor) - response = self.client.get(explorer_url) - self.assertEqual(200, response.status_code) - - self.login(self.admin) - - response = self.client.get(explorer_url) - self.assertContains(response, "To use the explorer you need to first create") - self.assertContains(response, reverse("orgs.user_tokens")) - - APIToken.create(self.org, self.admin) - - response = self.client.get(explorer_url) - self.assertContains(response, "All operations work against real data in the Nyaruka workspace.") - - def test_pagination(self): - endpoint_url = reverse("api.v2.runs") + ".json" - self.login(self.admin) - - # create 1255 test runs (5 full pages of 250 items + 1 partial with 5 items) - flow = self.create_flow("Test") - runs = [] - for r in range(1255): - runs.append(FlowRun(org=self.org, flow=flow, contact=self.joe, status="C", exited_on=timezone.now())) - FlowRun.objects.bulk_create(runs) - actual_ids = list(FlowRun.objects.order_by("-pk").values_list("pk", flat=True)) - - # give them all the same modified_on - FlowRun.objects.all().update(modified_on=datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)) - - returned_ids = [] - - # fetch all full pages - with self.mockReadOnly(): - resp_json = None - for p in range(5): - response = self.client.get( - endpoint_url if p == 0 else resp_json["next"], content_type="application/json" - ) - self.assertEqual(200, response.status_code) - - resp_json = response.json() - - self.assertEqual(len(resp_json["results"]), 250) - self.assertIsNotNone(resp_json["next"]) - - returned_ids += [r["id"] for r in response.json()["results"]] - - # fetch final partial page - with self.mockReadOnly(): - response = self.client.get(resp_json["next"], content_type="application/json") - - resp_json = response.json() - self.assertEqual(len(resp_json["results"]), 5) - self.assertIsNone(resp_json["next"]) - - returned_ids += [r["id"] for r in response.json()["results"]] - - self.assertEqual(returned_ids, actual_ids) # ensure all results were returned and in correct order - - @patch("temba.flows.models.FlowStart.create") - def test_transactions(self, mock_flowstart_create): - """ - Serializer writes are wrapped in a transaction. This test simulates FlowStart.create blowing up and checks that - contacts aren't created. 
- """ - mock_flowstart_create.side_effect = ValueError("DOH!") - - flow = self.create_flow("Test") - - try: - self.assertPost( - reverse("api.v2.flow_starts") + ".json", - self.admin, - {"flow": str(flow.uuid), "urns": ["tel:+12067791212"]}, - status=201, - ) - self.fail() # ensure exception is thrown - except ValueError: - pass - - self.assertFalse(Contact.objects.filter(urns__path="+12067791212")) - - def test_archives(self): - endpoint_url = reverse("api.v2.archives") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.user, self.agent]) - self.assertPostNotAllowed(endpoint_url) - self.assertDeleteNotAllowed(endpoint_url) - - # create some archives - Archive.objects.create( - org=self.org, - start_date=datetime(2017, 4, 5), - build_time=12, - record_count=34, - size=345, - hash="c4ca4238a0b923820dcc509a6f75849b", - archive_type=Archive.TYPE_MSG, - period=Archive.PERIOD_DAILY, - ) - archive2 = Archive.objects.create( - org=self.org, - start_date=datetime(2017, 5, 5), - build_time=12, - record_count=34, - size=345, - hash="c81e728d9d4c2f636f067f89cc14862c", - archive_type=Archive.TYPE_MSG, - period=Archive.PERIOD_MONTHLY, - ) - archive3 = Archive.objects.create( - org=self.org, - start_date=datetime(2017, 6, 5), - build_time=12, - record_count=34, - size=345, - hash="eccbc87e4b5ce2fe28308fd9f2a7baf3", - archive_type=Archive.TYPE_FLOWRUN, - period=Archive.PERIOD_DAILY, - ) - archive4 = Archive.objects.create( - org=self.org, - start_date=datetime(2017, 7, 5), - build_time=12, - record_count=34, - size=345, - hash="a87ff679a2f3e71d9181a67b7542122c", - archive_type=Archive.TYPE_FLOWRUN, - period=Archive.PERIOD_MONTHLY, - ) - # this archive has been rolled up and it should not be included in the API responses - Archive.objects.create( - org=self.org, - start_date=datetime(2017, 5, 1), - build_time=12, - record_count=34, - size=345, - hash="e4da3b7fbbce2345d7772b0674a318d5", - archive_type=Archive.TYPE_FLOWRUN, - period=Archive.PERIOD_DAILY, - rollup=archive2, - ) - - # create archive for other org - Archive.objects.create( - org=self.org2, - start_date=datetime(2017, 5, 1), - build_time=12, - record_count=34, - size=345, - hash="1679091c5a880faf6fb5e6087eb1b2dc", - archive_type=Archive.TYPE_FLOWRUN, - period=Archive.PERIOD_DAILY, - ) - - # there should be 4 archives in the response, because one has been rolled up - self.assertGet( - endpoint_url, - [self.editor], - results=[ - { - "archive_type": "run", - "download_url": "", - "hash": "a87ff679a2f3e71d9181a67b7542122c", - "period": "monthly", - "record_count": 34, - "size": 345, - "start_date": "2017-07-05", - }, - { - "archive_type": "run", - "download_url": "", - "hash": "eccbc87e4b5ce2fe28308fd9f2a7baf3", - "period": "daily", - "record_count": 34, - "size": 345, - "start_date": "2017-06-05", - }, - { - "archive_type": "message", - "download_url": "", - "hash": "c81e728d9d4c2f636f067f89cc14862c", - "period": "monthly", - "record_count": 34, - "size": 345, - "start_date": "2017-05-05", - }, - { - "archive_type": "message", - "download_url": "", - "hash": "c4ca4238a0b923820dcc509a6f75849b", - "period": "daily", - "record_count": 34, - "size": 345, - "start_date": "2017-04-05", - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 2, - ) - - self.assertGet(endpoint_url + "?after=2017-05-01", [self.editor], results=[archive4, archive3, archive2]) - self.assertGet(endpoint_url + "?after=2017-05-01&archive_type=run", [self.editor], results=[archive4, archive3]) - - # unknown archive type - self.assertGet(endpoint_url + 
"?archive_type=invalid", [self.editor], results=[]) - - # only for dailies - self.assertGet( - endpoint_url + "?after=2017-05-01&archive_type=run&period=daily", [self.editor], results=[archive3] - ) - - # only for monthlies - self.assertGet(endpoint_url + "?period=monthly", [self.editor], results=[archive4, archive2]) - - # test access from a user with no org - self.login(self.non_org_user) - response = self.client.get(endpoint_url) - self.assertEqual(403, response.status_code) - - def test_boundaries(self): - endpoint_url = reverse("api.v2.boundaries") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotAllowed(endpoint_url) - self.assertDeleteNotAllowed(endpoint_url) - - self.setUpLocations() - - BoundaryAlias.create(self.org, self.admin, self.state1, "Kigali") - BoundaryAlias.create(self.org, self.admin, self.state2, "East Prov") - BoundaryAlias.create(self.org2, self.admin2, self.state1, "Other Org") # shouldn't be returned - - self.state1.simplified_geometry = GEOSGeometry("MULTIPOLYGON(((1 1, 1 -1, -1 -1, -1 1, 1 1)))") - self.state1.save() - - # test without geometry - self.assertGet( - endpoint_url, - [self.user, self.editor, self.admin], - results=[ - { - "osm_id": "1708283", - "name": "Kigali City", - "parent": {"osm_id": "171496", "name": "Rwanda"}, - "level": 1, - "aliases": ["Kigali", "Kigari"], - "geometry": None, - }, - { - "osm_id": "171113181", - "name": "Kageyo", - "parent": {"osm_id": "R1711131", "name": "Gatsibo"}, - "level": 3, - "aliases": [], - "geometry": None, - }, - { - "osm_id": "1711142", - "name": "Rwamagana", - "parent": {"osm_id": "171591", "name": "Eastern Province"}, - "level": 2, - "aliases": [], - "geometry": None, - }, - { - "osm_id": "1711163", - "name": "Kay\u00f4nza", - "parent": {"osm_id": "171591", "name": "Eastern Province"}, - "level": 2, - "aliases": [], - "geometry": None, - }, - { - "osm_id": "171116381", - "name": "Kabare", - "parent": {"osm_id": "1711163", "name": "Kay\u00f4nza"}, - "level": 3, - "aliases": [], - "geometry": None, - }, - {"osm_id": "171496", "name": "Rwanda", "parent": None, "level": 0, "aliases": [], "geometry": None}, - { - "osm_id": "171591", - "name": "Eastern Province", - "parent": {"osm_id": "171496", "name": "Rwanda"}, - "level": 1, - "aliases": ["East Prov"], - "geometry": None, - }, - { - "osm_id": "3963734", - "name": "Nyarugenge", - "parent": {"osm_id": "1708283", "name": "Kigali City"}, - "level": 2, - "aliases": [], - "geometry": None, - }, - { - "osm_id": "R1711131", - "name": "Gatsibo", - "parent": {"osm_id": "171591", "name": "Eastern Province"}, - "level": 2, - "aliases": [], - "geometry": None, - }, - { - "osm_id": "VMN.49.1_1", - "name": "Bukure", - "parent": {"osm_id": "1711142", "name": "Rwamagana"}, - "level": 3, - "aliases": [], - "geometry": None, - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 3, - ) - - # test with geometry - self.assertGet( - endpoint_url + "?geometry=true", - [self.admin], - results=[ - { - "osm_id": "1708283", - "name": "Kigali City", - "parent": {"osm_id": "171496", "name": "Rwanda"}, - "level": 1, - "aliases": ["Kigali", "Kigari"], - "geometry": { - "type": "MultiPolygon", - "coordinates": [[[[1.0, 1.0], [1.0, -1.0], [-1.0, -1.0], [-1.0, 1.0], [1.0, 1.0]]]], - }, - }, - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - matchers.Dict(), - ], - num_queries=NUM_BASE_SESSION_QUERIES + 3, - ) - - # if org doesn't have a country, just return no 
-        self.org.country = None
-        self.org.save(update_fields=("country",))
-
-        self.assertGet(endpoint_url, [self.admin], results=[])
-
-    @mock_mailroom
-    def test_broadcasts(self, mr_mocks):
-        endpoint_url = reverse("api.v2.broadcasts") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        reporters = self.create_group("Reporters", [self.joe, self.frank])
-
-        bcast1 = self.create_broadcast(self.admin, {"eng": {"text": "Hello 1"}}, urns=["twitter:franky"], status="Q")
-        bcast2 = self.create_broadcast(self.admin, {"eng": {"text": "Hello 2"}}, contacts=[self.joe], status="Q")
-        bcast3 = self.create_broadcast(self.admin, {"eng": {"text": "Hello 3"}}, contacts=[self.frank], status="S")
-        bcast4 = self.create_broadcast(
-            self.admin,
-            {"eng": {"text": "Hello 4"}},
-            urns=["twitter:franky"],
-            contacts=[self.joe],
-            groups=[reporters],
-            status="F",
-        )
-        self.create_broadcast(
-            self.admin,
-            {"eng": {"text": "Scheduled"}},
-            contacts=[self.joe],
-            schedule=Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY),
-        )
-        self.create_broadcast(self.admin2, {"eng": {"text": "Different org..."}}, contacts=[self.hans], org=self.org2)
-
-        # no filtering
-        response = self.assertGet(
-            endpoint_url,
-            [self.user, self.editor, self.admin],
-            results=[bcast4, bcast3, bcast2, bcast1],
-            num_queries=NUM_BASE_SESSION_QUERIES + 3,
-        )
-        resp_json = response.json()
-
-        self.assertEqual(
-            {
-                "id": bcast2.id,
-                "urns": [],
-                "contacts": [{"uuid": self.joe.uuid, "name": self.joe.name}],
-                "groups": [],
-                "text": {"eng": "Hello 2"},
-                "attachments": {"eng": []},
-                "base_language": "eng",
-                "status": "queued",
-                "created_on": format_datetime(bcast2.created_on),
-            },
-            resp_json["results"][2],
-        )
-        self.assertEqual(
-            {
-                "id": bcast4.id,
-                "urns": ["twitter:franky"],
-                "contacts": [{"uuid": self.joe.uuid, "name": self.joe.name}],
-                "groups": [{"uuid": reporters.uuid, "name": reporters.name}],
-                "text": {"eng": "Hello 4"},
-                "attachments": {"eng": []},
-                "base_language": "eng",
-                "status": "failed",
-                "created_on": format_datetime(bcast4.created_on),
-            },
-            resp_json["results"][0],
-        )
-
-        # filter by id
-        self.assertGet(endpoint_url + f"?id={bcast3.id}", [self.editor], results=[bcast3])
-
-        # filter by before / after
-        self.assertGet(
-            endpoint_url + f"?before={format_datetime(bcast2.created_on)}", [self.editor], results=[bcast2, bcast1]
-        )
-        self.assertGet(
-            endpoint_url + f"?after={format_datetime(bcast3.created_on)}", [self.editor], results=[bcast4, bcast3]
-        )
-
-        with self.anonymous(self.org):
-            response = self.assertGet(endpoint_url + f"?id={bcast1.id}", [self.editor], results=[bcast1])
-
-            # URNs shouldn't be included
-            self.assertIsNone(response.json()["results"][0]["urns"])
-
-        # try to create new broadcast with no data at all
-        self.assertPost(
-            endpoint_url, self.admin, {}, errors={"non_field_errors": "Must provide either text or attachments."}
-        )
-
-        # try to create new broadcast with no recipients
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"text": "Hello"},
-            errors={"non_field_errors": "Must provide either urns, contacts or groups."},
-        )
-
-        # try to create new broadcast with invalid group lookup
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"text": "Hello", "groups": [123456]},
-            errors={"groups": "No such object: 123456"},
-        )
-
-        # try to create new broadcast with translations that don't include base language
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"text": {"kin": "Muraho"}, "base_language": "eng", "contacts": [self.joe.uuid]},
-            errors={"non_field_errors": "No text translation provided in base language."},
-        )
-
-        media1 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg")
-        media2 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/snow.mp4")
-
-        # try to create new broadcast with attachment translations that don't include base language
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {
-                "text": {"eng": "Hello"},
-                "attachments": {"spa": [str(media1.uuid)]},
-                "base_language": "eng",
-                "contacts": [self.joe.uuid],
-            },
-            errors={"non_field_errors": "No attachment translations provided in base language."},
-        )
-
-        # create new broadcast with all fields
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {
-                "text": {"eng": "Hello @contact.name", "spa": "Hola @contact.name"},
-                "attachments": {
-                    "eng": [str(media1.uuid), f"video/mp4:http://example.com/{media2.uuid}.mp4"],
-                    "kin": [str(media2.uuid)],
-                },
-                "base_language": "eng",
-                "urns": ["twitter:franky"],
-                "contacts": [self.joe.uuid, self.frank.uuid],
-                "groups": [reporters.uuid],
-            },
-            status=201,
-        )
-
-        broadcast = Broadcast.objects.get(id=response.json()["id"])
-        self.assertEqual(
-            {
-                "eng": {
-                    "text": "Hello @contact.name",
-                    "attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"],
-                },
-                "spa": {"text": "Hola @contact.name"},
-                "kin": {"attachments": [f"video/mp4:{media2.url}"]},
-            },
-            broadcast.translations,
-        )
-        self.assertEqual("eng", broadcast.base_language)
-        self.assertEqual(["twitter:franky"], broadcast.urns)
-        self.assertEqual({self.joe, self.frank}, set(broadcast.contacts.all()))
-        self.assertEqual({reporters}, set(broadcast.groups.all()))
-
-        # create new broadcast without translations
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {
-                "text": "Hello",
-                "attachments": [str(media1.uuid), str(media2.uuid)],
-                "contacts": [self.joe.uuid, self.frank.uuid],
-            },
-            status=201,
-        )
-
-        broadcast = Broadcast.objects.get(id=response.json()["id"])
-        self.assertEqual(
-            {
-                "eng": {
-                    "text": "Hello",
-                    "attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"],
-                }
-            },
-            broadcast.translations,
-        )
-        self.assertEqual("eng", broadcast.base_language)
-        self.assertEqual({self.joe, self.frank}, set(broadcast.contacts.all()))
-
-        # create new broadcast without translations containing only text, no attachments
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"text": "Hello", "contacts": [self.joe.uuid, self.frank.uuid]},
-            status=201,
-        )
-
-        broadcast = Broadcast.objects.get(id=response.json()["id"])
-        self.assertEqual({"eng": {"text": "Hello"}}, broadcast.translations)
-
-        # create new broadcast without translations containing only attachments, no text
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"attachments": [str(media1.uuid), str(media2.uuid)], "contacts": [self.joe.uuid, self.frank.uuid]},
-            status=201,
-        )
-
-        broadcast = Broadcast.objects.get(id=response.json()["id"])
-        self.assertEqual(
-            {"eng": {"attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"]}},
-            broadcast.translations,
-        )
-
-        # try sending as a flagged org
-        self.org.flag()
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"text": "Hello", "contacts": [self.joe.uuid]},
-            errors={"non_field_errors": Org.BLOCKER_FLAGGED},
-        )
-
-    def test_campaigns(self):
-        endpoint_url = reverse("api.v2.campaigns") + ".json"
+ ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) - self.assertDeleteNotAllowed(endpoint_url) - - reporters = self.create_group("Reporters", [self.joe, self.frank]) - other_group = self.create_group("Others", []) - campaign1 = Campaign.create(self.org, self.admin, "Reminders #1", reporters) - campaign2 = Campaign.create(self.org, self.admin, "Reminders #2", reporters) - - # create campaign for other org - spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") - spam = Campaign.create(self.org2, self.admin2, "Spam", spammers) - - # no filtering - response = self.assertGet( - endpoint_url, - [self.user, self.editor, self.admin], - results=[ - { - "uuid": str(campaign2.uuid), - "name": "Reminders #2", - "archived": False, - "group": {"uuid": reporters.uuid, "name": "Reporters"}, - "created_on": format_datetime(campaign2.created_on), - }, - { - "uuid": str(campaign1.uuid), - "name": "Reminders #1", - "archived": False, - "group": {"uuid": reporters.uuid, "name": "Reporters"}, - "created_on": format_datetime(campaign1.created_on), - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 2, - ) - - # filter by UUID - self.assertGet(endpoint_url + f"?uuid={campaign1.uuid}", [self.editor], results=[campaign1]) - - # try to create empty campaign - self.assertPost( - endpoint_url, - self.editor, - {}, - errors={"name": "This field is required.", "group": "This field is required."}, - ) - - # create new campaign - response = self.assertPost( - endpoint_url, self.editor, {"name": "Reminders #3", "group": reporters.uuid}, status=201 - ) - - campaign3 = Campaign.objects.get(name="Reminders #3") - self.assertEqual( - response.json(), - { - "uuid": str(campaign3.uuid), - "name": "Reminders #3", - "archived": False, - "group": {"uuid": reporters.uuid, "name": "Reporters"}, - "created_on": format_datetime(campaign3.created_on), - }, - ) - - # try to create another campaign with same name - self.assertPost( - endpoint_url, - self.editor, - {"name": "Reminders #3", "group": reporters.uuid}, - errors={"name": "This field must be unique."}, - ) - - # it's fine if a campaign in another org has that name - self.assertPost(endpoint_url, self.editor, {"name": "Spam", "group": reporters.uuid}, status=201) - - # try to create a campaign with name that's too long - self.assertPost( - endpoint_url, - self.editor, - {"name": "x" * 65, "group": reporters.uuid}, - errors={"name": "Ensure this field has no more than 64 characters."}, - ) - - # update campaign by UUID - self.assertPost( - endpoint_url + f"?uuid={campaign3.uuid}", self.editor, {"name": "Reminders III", "group": other_group.uuid} - ) - - campaign3.refresh_from_db() - self.assertEqual(campaign3.name, "Reminders III") - self.assertEqual(campaign3.group, other_group) - - # can't update campaign in other org - self.assertPost( - endpoint_url + f"?uuid={spam.uuid}", self.editor, {"name": "Won't work", "group": spammers.uuid}, status=404 - ) - - # can't update deleted campaign - campaign1.is_active = False - campaign1.save(update_fields=("is_active",)) - - self.assertPost( - endpoint_url + f"?uuid={campaign1.uuid}", - self.editor, - {"name": "Won't work", "group": spammers.uuid}, - status=404, - ) - - # can't update inactive or archived campaign - campaign1.is_active = True - campaign1.is_archived = True - campaign1.save(update_fields=("is_active", "is_archived")) - - self.assertPost( - endpoint_url + f"?uuid={campaign1.uuid}", - self.editor, - {"name": 
"Won't work", "group": spammers.uuid}, - status=404, - ) - - @mock_mailroom - def test_campaign_events(self, mr_mocks): - endpoint_url = reverse("api.v2.campaign_events") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) - self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) - - flow = self.create_flow("Test Flow") - reporters = self.create_group("Reporters", [self.joe, self.frank]) - registration = self.create_field("registration", "Registration", value_type=ContactField.TYPE_DATETIME) - field_created_on = self.org.fields.get(key="created_on") - - # create our contact and set a registration date - contact = self.create_contact( - "Joe", phone="+12065551515", fields={"registration": self.org.format_datetime(timezone.now())} - ) - reporters.contacts.add(contact) - - campaign1 = Campaign.create(self.org, self.admin, "Reminders", reporters) - event1 = CampaignEvent.create_message_event( - self.org, - self.admin, - campaign1, - registration, - 1, - CampaignEvent.UNIT_DAYS, - "Don't forget to brush your teeth", - ) - - campaign2 = Campaign.create(self.org, self.admin, "Notifications", reporters) - event2 = CampaignEvent.create_flow_event( - self.org, self.admin, campaign2, registration, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 - ) - - campaign3 = Campaign.create(self.org, self.admin, "Alerts", reporters) - event3 = CampaignEvent.create_flow_event( - self.org, self.admin, campaign3, field_created_on, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 - ) - - # create event for another org - joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) - spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") - spam = Campaign.create(self.org2, self.admin2, "Cool stuff", spammers) - CampaignEvent.create_flow_event( - self.org2, self.admin2, spam, joined, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 - ) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor, self.admin], - results=[ - { - "uuid": str(event3.uuid), - "campaign": {"uuid": str(campaign3.uuid), "name": "Alerts"}, - "relative_to": {"key": "created_on", "name": "Created On", "label": "Created On"}, - "offset": 6, - "unit": "hours", - "delivery_hour": 12, - "flow": {"uuid": flow.uuid, "name": "Test Flow"}, - "message": None, - "created_on": format_datetime(event3.created_on), - }, - { - "uuid": str(event2.uuid), - "campaign": {"uuid": str(campaign2.uuid), "name": "Notifications"}, - "relative_to": {"key": "registration", "name": "Registration", "label": "Registration"}, - "offset": 6, - "unit": "hours", - "delivery_hour": 12, - "flow": {"uuid": flow.uuid, "name": "Test Flow"}, - "message": None, - "created_on": format_datetime(event2.created_on), - }, - { - "uuid": str(event1.uuid), - "campaign": {"uuid": str(campaign1.uuid), "name": "Reminders"}, - "relative_to": {"key": "registration", "name": "Registration", "label": "Registration"}, - "offset": 1, - "unit": "days", - "delivery_hour": -1, - "flow": None, - "message": {"eng": "Don't forget to brush your teeth"}, - "created_on": format_datetime(event1.created_on), - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 4, - ) - - # filter by UUID - self.assertGet(endpoint_url + f"?uuid={event1.uuid}", [self.editor], results=[event1]) - - # filter by campaign name - self.assertGet(endpoint_url + "?campaign=Reminders", [self.editor], results=[event1]) - - # filter by campaign UUID - 
-        self.assertGet(endpoint_url + f"?campaign={campaign1.uuid}", [self.editor], results=[event1])
-
-        # filter by invalid campaign
-        self.assertGet(endpoint_url + "?campaign=Invalid", [self.editor], results=[])
-
-        # try to create empty campaign event
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {},
-            errors={
-                "campaign": "This field is required.",
-                "relative_to": "This field is required.",
-                "offset": "This field is required.",
-                "unit": "This field is required.",
-                "delivery_hour": "This field is required.",
-            },
-        )
-
-        # try again with some invalid values
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "epocs",
-                "delivery_hour": 25,
-                "message": {"kin": "Muraho"},
-            },
-            errors={
-                "unit": '"epocs" is not a valid choice.',
-                "delivery_hour": "Ensure this value is less than or equal to 23.",
-                "message": "Message text in default flow language is required.",
-            },
-        )
-
-        # provide valid values for those fields... but not a message or flow
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-            },
-            errors={
-                "non_field_errors": "Flow or a message text required.",
-            },
-        )
-
-        # create a message event
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": "You are @fields.age",
-            },
-            status=201,
-        )
-
-        event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-        self.assertEqual(event1.event_type, CampaignEvent.TYPE_MESSAGE)
-        self.assertEqual(event1.relative_to, registration)
-        self.assertEqual(event1.offset, 15)
-        self.assertEqual(event1.unit, "W")
-        self.assertEqual(event1.delivery_hour, -1)
-        self.assertEqual(event1.message, {"eng": "You are @fields.age"})
-        self.assertIsNotNone(event1.flow)
-
-        # try to create a message event with an empty message
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": "",
-            },
-            errors={("message", "eng"): "This field may not be blank."},
-        )
-
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "created_on",
-                "offset": 15,
-                "unit": "days",
-                "delivery_hour": -1,
-                "message": "Nice unit of work @fields.code",
-            },
-            status=201,
-        )
-
-        event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-        self.assertEqual(event1.event_type, CampaignEvent.TYPE_MESSAGE)
-        self.assertEqual(event1.relative_to, field_created_on)
-        self.assertEqual(event1.offset, 15)
-        self.assertEqual(event1.unit, "D")
-        self.assertEqual(event1.delivery_hour, -1)
-        self.assertEqual(event1.message, {"eng": "Nice unit of work @fields.code"})
-        self.assertIsNotNone(event1.flow)
-
-        # create a flow event
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "flow": str(flow.uuid),
-            },
-            status=201,
-        )
-
-        event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-        self.assertEqual(event2.event_type, CampaignEvent.TYPE_FLOW)
-        self.assertEqual(event2.relative_to, registration)
-        self.assertEqual(event2.offset, 15)
-        self.assertEqual(event2.unit, "W")
-        self.assertEqual(event2.delivery_hour, -1)
-        self.assertEqual(event2.message, None)
-        self.assertEqual(event2.flow, flow)
-
-        # make sure we queued a mailroom task to schedule this event
-        self.assertEqual(
-            {
-                "org_id": self.org.id,
-                "type": "schedule_campaign_event",
-                "queued_on": matchers.Datetime(),
-                "task": {"campaign_event_id": event2.id, "org_id": self.org.id},
-            },
-            mr_mocks.queued_batch_tasks[-1],
-        )
-
-        # update the message event to be a flow event
-        self.assertPost(
-            endpoint_url + f"?uuid={event1.uuid}",
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "flow": str(flow.uuid),
-            },
-        )
-
-        event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-
-        self.assertEqual(event1.event_type, CampaignEvent.TYPE_FLOW)
-        self.assertIsNone(event1.message)
-        self.assertEqual(event1.flow, flow)
-
-        # and update the flow event to be a message event
-        self.assertPost(
-            endpoint_url + f"?uuid={event2.uuid}",
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": {"eng": "OK @(format_urn(urns.tel))", "fra": "D'accord"},
-            },
-        )
-
-        event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-        self.assertEqual(event2.event_type, CampaignEvent.TYPE_MESSAGE)
-        self.assertEqual(event2.message, {"eng": "OK @(format_urn(urns.tel))", "fra": "D'accord"})
-
-        # and update its message again
-        self.assertPost(
-            endpoint_url + f"?uuid={event2.uuid}",
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"},
-            },
-        )
-
-        event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first()
-        self.assertEqual(event2.event_type, CampaignEvent.TYPE_MESSAGE)
-        self.assertEqual(event2.message, {"eng": "OK", "fra": "D'accord", "kin": "Sawa"})
-
-        # try to change an existing event's campaign
-        self.assertPost(
-            endpoint_url + f"?uuid={event1.uuid}",
-            self.editor,
-            {
-                "campaign": str(campaign2.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"},
-            },
-            errors={"campaign": "Cannot change campaign for existing events"},
-        )
-
-        # try an empty delete request
-        self.assertDelete(
-            endpoint_url, self.editor, errors={None: "URL must contain one of the following parameters: uuid"}
-        )
-
-        # delete an event by UUID
-        self.assertDelete(endpoint_url + f"?uuid={event1.uuid}", self.editor)
-
-        self.assertFalse(CampaignEvent.objects.filter(id=event1.id, is_active=True).exists())
-
-        # can't make changes to events on archived campaigns
-        campaign1.archive(self.admin)
-
-        self.assertPost(
-            endpoint_url + f"?uuid={event2.uuid}",
-            self.editor,
-            {
-                "campaign": str(campaign1.uuid),
-                "relative_to": "registration",
-                "offset": 15,
-                "unit": "weeks",
-                "delivery_hour": -1,
-                "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"},
-            },
-            errors={"campaign": f"No such object: {campaign1.uuid}"},
-        )
-
-    def test_channels(self):
-        endpoint_url = reverse("api.v2.channels") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create deleted channel
-        deleted = self.create_channel("JC", "Deleted", "nyaruka")
-        deleted.release(self.admin)
-
-        # create channel for other org
-        self.create_channel("TWT", "Twitter Channel", "nyaruka", org=self.org2)
-
-        # no filtering
-        self.assertGet(
-            endpoint_url,
-            [self.user, self.editor, self.admin],
-            results=[
-                {
-                    "uuid": self.twitter.uuid,
-                    "name": "Twitter Channel",
-                    "address": "billy_bob",
-                    "country": None,
-                    "device": None,
-                    "last_seen": None,
-                    "created_on": format_datetime(self.twitter.created_on),
-                },
-                {
-                    "uuid": self.channel.uuid,
-                    "name": "Test Channel",
-                    "address": "+250785551212",
-                    "country": "RW",
-                    "device": {
-                        "name": "Nexus 5X",
-                        "network_type": None,
-                        "power_level": -1,
-                        "power_source": None,
-                        "power_status": None,
-                    },
-                    "last_seen": format_datetime(self.channel.last_seen),
-                    "created_on": format_datetime(self.channel.created_on),
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 2,
-        )
-
-        # filter by UUID
-        self.assertGet(endpoint_url + f"?uuid={self.twitter.uuid}", [self.admin], results=[self.twitter])
-
-        # filter by address
-        self.assertGet(endpoint_url + "?address=billy_bob", [self.admin], results=[self.twitter])
-
-    def test_channel_events(self):
-        endpoint_url = reverse("api.v2.channel_events") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        call1 = self.create_channel_event(self.channel, "tel:+250788123123", ChannelEvent.TYPE_CALL_IN_MISSED)
-        call2 = self.create_channel_event(
-            self.channel, "tel:+250788124124", ChannelEvent.TYPE_CALL_IN, extra=dict(duration=36)
-        )
-        call3 = self.create_channel_event(self.channel, "tel:+250788124124", ChannelEvent.TYPE_CALL_OUT_MISSED)
-        call4 = self.create_channel_event(
-            self.channel, "tel:+250788123123", ChannelEvent.TYPE_CALL_OUT, extra=dict(duration=15)
-        )
-
-        # no filtering
-        response = self.assertGet(
-            endpoint_url,
-            [self.user, self.editor, self.admin],
-            results=[call4, call3, call2, call1],
-            num_queries=NUM_BASE_SESSION_QUERIES + 3,
-        )
-
-        resp_json = response.json()
-        self.assertEqual(
-            resp_json["results"][0],
-            {
-                "id": call4.pk,
-                "channel": {"uuid": self.channel.uuid, "name": "Test Channel"},
-                "type": "call-out",
-                "contact": {"uuid": self.joe.uuid, "name": self.joe.name},
-                "occurred_on": format_datetime(call4.occurred_on),
-                "extra": dict(duration=15),
-                "created_on": format_datetime(call4.created_on),
-            },
-        )
-
-        # filter by id
-        self.assertGet(endpoint_url + f"?id={call1.id}", [self.editor], results=[call1])
-
-        # filter by contact
-        self.assertGet(endpoint_url + f"?contact={self.joe.uuid}", [self.editor], results=[call4, call1])
-
-        # filter by invalid contact
-        self.assertGet(endpoint_url + "?contact=invalid", [self.editor], results=[])
-
-        # filter by before / after
-        self.assertGet(
-            endpoint_url + f"?before={format_datetime(call3.created_on)}", [self.editor], results=[call3, call2, call1]
-        )
-        self.assertGet(
-            endpoint_url + f"?after={format_datetime(call2.created_on)}", [self.editor], results=[call4, call3, call2]
-        )
-
-    def test_classifiers(self):
-        endpoint_url = reverse("api.v2.classifiers") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some classifiers
-        c1 = Classifier.create(self.org, self.admin, WitType.slug, "Booker", {})
-        c1.intents.create(name="book_flight", external_id="book_flight", created_on=timezone.now(), is_active=True)
c1.intents.create(name="book_hotel", external_id="book_hotel", created_on=timezone.now(), is_active=False) - c1.intents.create(name="book_car", external_id="book_car", created_on=timezone.now(), is_active=True) - - c2 = Classifier.create(self.org, self.admin, WitType.slug, "Old Booker", {}) - c2.is_active = False - c2.save() - - # on another org - Classifier.create(self.org2, self.admin, LuisType.slug, "Org2 Booker", {}) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor, self.admin], - results=[ - { - "name": "Booker", - "type": "wit", - "uuid": str(c1.uuid), - "intents": ["book_car", "book_flight"], - "created_on": format_datetime(c1.created_on), - } - ], - num_queries=NUM_BASE_SESSION_QUERIES + 2, - ) - - # filter by uuid (not there) - self.assertGet(endpoint_url + "?uuid=09d23a05-47fe-11e4-bfe9-b8f6b119e9ab", [self.editor], results=[]) - - # filter by uuid present - self.assertGet(endpoint_url + f"?uuid={c1.uuid}", [self.user, self.editor, self.admin], results=[c1]) - - @mock_mailroom - def test_contacts(self, mr_mocks): - endpoint_url = reverse("api.v2.contacts") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotPermitted(endpoint_url, [None, self.user]) - self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) - - # create some more contacts (in addition to Joe and Frank) - contact1 = self.create_contact( - "Ann", phone="0788000001", language="fra", fields={"nickname": "Annie", "gender": "female"} - ) - contact2 = self.create_contact("Bob", phone="0788000002") - contact3 = self.create_contact("Cat", phone="0788000003") - contact4 = self.create_contact( - "Don", phone="0788000004", language="fra", fields={"nickname": "Donnie", "gender": "male"} - ) - - contact1.stop(self.user) - contact2.block(self.user) - contact3.release(self.user) - - # put some contacts in a group - group = self.create_group("Customers", contacts=[self.joe, contact4]) - other_org_group = self.create_group("Nerds", org=self.org2) - - # tweak modified_on so we get the order we want - self.joe.modified_on = timezone.now() - self.joe.save(update_fields=("modified_on",)) - - survey = self.create_flow("Survey") - contact4.modified_on = timezone.now() - contact4.last_seen_on = datetime(2020, 8, 12, 13, 30, 45, 123456, tzone.utc) - contact4.current_flow = survey - contact4.save(update_fields=("modified_on", "last_seen_on", "current_flow")) - - contact1.refresh_from_db() - contact4.refresh_from_db() - self.joe.refresh_from_db() - - # create contact for other org - hans = self.create_contact("Hans", phone="0788000004", org=self.org2) - - # no filtering - response = self.assertGet( - endpoint_url, - [self.user, self.editor, self.admin, self.agent], - results=[contact4, self.joe, contact2, contact1, self.frank], - num_queries=NUM_BASE_SESSION_QUERIES + 7, - ) - self.assertEqual( - { - "uuid": contact4.uuid, - "name": "Don", - "status": "active", - "language": "fra", - "urns": ["tel:+250788000004"], - "groups": [{"uuid": group.uuid, "name": group.name}], - "notes": [], - "fields": {"nickname": "Donnie", "gender": "male"}, - "flow": {"uuid": str(survey.uuid), "name": "Survey"}, - "created_on": format_datetime(contact4.created_on), - "modified_on": format_datetime(contact4.modified_on), - "last_seen_on": "2020-08-12T13:30:45.123456Z", - "blocked": False, - "stopped": False, - }, - response.json()["results"][0], - ) - - # no filtering with token auth - response = self.assertGet( - endpoint_url, - [self.admin], - results=[contact4, self.joe, contact2, 
-            by_token=True,
-            num_queries=NUM_BASE_TOKEN_QUERIES + 7,
-        )
-
-        # with expanded URNs
-        response = self.assertGet(
-            endpoint_url + "?expand_urns=true",
-            [self.user],
-            results=[contact4, self.joe, contact2, contact1, self.frank],
-        )
-        self.assertEqual(
-            {
-                "uuid": contact4.uuid,
-                "name": "Don",
-                "status": "active",
-                "language": "fra",
-                "urns": [
-                    {
-                        "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"},
-                        "scheme": "tel",
-                        "path": "+250788000004",
-                        "display": None,
-                    }
-                ],
-                "groups": [{"uuid": group.uuid, "name": group.name}],
-                "notes": [],
-                "fields": {"nickname": "Donnie", "gender": "male"},
-                "flow": {"uuid": str(survey.uuid), "name": "Survey"},
-                "created_on": format_datetime(contact4.created_on),
-                "modified_on": format_datetime(contact4.modified_on),
-                "last_seen_on": "2020-08-12T13:30:45.123456Z",
-                "blocked": False,
-                "stopped": False,
-            },
-            response.json()["results"][0],
-        )
-
-        # reversed
-        response = self.assertGet(
-            endpoint_url + "?reverse=true",
-            [self.user],
-            results=[self.frank, contact1, contact2, self.joe, contact4],
-        )
-
-        with self.anonymous(self.org):
-            response = self.assertGet(
-                endpoint_url,
-                [self.user, self.editor, self.admin, self.agent],
-                results=[contact4, self.joe, contact2, contact1, self.frank],
-                num_queries=NUM_BASE_SESSION_QUERIES + 7,
-            )
-            self.assertEqual(
-                {
-                    "uuid": contact4.uuid,
-                    "name": "Don",
-                    "anon_display": f"{contact4.id:010}",
-                    "status": "active",
-                    "language": "fra",
-                    "urns": ["tel:********"],
-                    "groups": [{"uuid": group.uuid, "name": group.name}],
-                    "notes": [],
-                    "fields": {"nickname": "Donnie", "gender": "male"},
-                    "flow": {"uuid": str(survey.uuid), "name": "Survey"},
-                    "created_on": format_datetime(contact4.created_on),
-                    "modified_on": format_datetime(contact4.modified_on),
-                    "last_seen_on": "2020-08-12T13:30:45.123456Z",
-                    "blocked": False,
-                    "stopped": False,
-                },
-                response.json()["results"][0],
-            )
-
-            # with expanded URNs
-            response = self.assertGet(
-                endpoint_url + "?expand_urns=true",
-                [self.user],
-                results=[contact4, self.joe, contact2, contact1, self.frank],
-            )
-            self.assertEqual(
-                {
-                    "uuid": contact4.uuid,
-                    "name": "Don",
-                    "anon_display": f"{contact4.id:010}",
-                    "status": "active",
-                    "language": "fra",
-                    "urns": [
-                        {
-                            "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"},
-                            "scheme": "tel",
-                            "path": "********",
-                            "display": None,
-                        }
-                    ],
-                    "groups": [{"uuid": group.uuid, "name": group.name}],
-                    "notes": [],
-                    "fields": {"nickname": "Donnie", "gender": "male"},
-                    "flow": {"uuid": str(survey.uuid), "name": "Survey"},
-                    "created_on": format_datetime(contact4.created_on),
-                    "modified_on": format_datetime(contact4.modified_on),
-                    "last_seen_on": "2020-08-12T13:30:45.123456Z",
-                    "blocked": False,
-                    "stopped": False,
-                },
-                response.json()["results"][0],
-            )
-
-        # filter by UUID
-        self.assertGet(endpoint_url + f"?uuid={contact2.uuid}", [self.editor], results=[contact2])
-
-        # filter by URN (which should be normalized)
-        self.assertGet(endpoint_url + f"?urn={quote_plus('tel:078-8000004')}", [self.editor], results=[contact4])
-
-        # error if URN can't be parsed
-        self.assertGet(endpoint_url + "?urn=12345", [self.editor], errors={None: "Invalid URN: 12345"})
-
-        # filter by group UUID / name
-        self.assertGet(endpoint_url + f"?group={group.uuid}", [self.editor], results=[contact4, self.joe])
-        self.assertGet(endpoint_url + "?group=Customers", [self.editor], results=[contact4, self.joe])
-
-        # filter by invalid group
-        self.assertGet(endpoint_url + "?group=invalid", [self.editor], results=[])
+ "?group=invalid", [self.editor], results=[]) - - # filter by before / after - self.assertGet( - endpoint_url + f"?before={format_datetime(contact1.modified_on)}", - [self.editor], - results=[contact1, self.frank], - ) - self.assertGet( - endpoint_url + f"?after={format_datetime(self.joe.modified_on)}", - [self.editor], - results=[contact4, self.joe], - ) - - # view the deleted contact - self.assertGet( - endpoint_url + "?deleted=true", - [self.editor], - results=[ - { - "uuid": contact3.uuid, - "name": None, - "status": None, - "language": None, - "urns": [], - "groups": [], - "notes": [], - "fields": {}, - "flow": None, - "created_on": format_datetime(contact3.created_on), - "modified_on": format_datetime(contact3.modified_on), - "last_seen_on": None, - "blocked": None, - "stopped": None, - } - ], - ) - - # try to post something other than an object - self.assertPost( - endpoint_url, self.editor, [], errors={"non_field_errors": "Request body should be a single JSON object"} - ) - - # create an empty contact - response = self.assertPost(endpoint_url, self.editor, {}, status=201) - - empty = Contact.objects.get(name=None, is_active=True) - self.assertEqual( - { - "uuid": empty.uuid, - "name": None, - "status": "active", - "language": None, - "urns": [], - "groups": [], - "notes": [], - "fields": {"nickname": None, "gender": None}, - "flow": None, - "created_on": format_datetime(empty.created_on), - "modified_on": format_datetime(empty.modified_on), - "last_seen_on": None, - "blocked": False, - "stopped": False, - }, - response.json(), - ) - - # create with all fields but empty - response = self.assertPost( - endpoint_url, - self.editor, - {"name": None, "language": None, "urns": [], "groups": [], "fields": {}}, - status=201, - ) - - jaqen = Contact.objects.order_by("id").last() - self.assertIsNone(jaqen.name) - self.assertIsNone(jaqen.language) - self.assertEqual(Contact.STATUS_ACTIVE, jaqen.status) - self.assertEqual(set(), set(jaqen.urns.all())) - self.assertEqual(set(), set(jaqen.get_groups())) - self.assertIsNone(jaqen.fields) - - # create with all fields - self.assertPost( - endpoint_url, - self.editor, - { - "name": "Jean", - "language": "fra", - "urns": ["tel:+250783333333", "twitter:JEAN"], - "groups": [group.uuid], - "fields": {"nickname": "Jado"}, - }, - status=201, - ) - - # URNs will be normalized - nickname = self.org.fields.get(key="nickname") - gender = self.org.fields.get(key="gender") - jean = Contact.objects.filter(name="Jean", language="fra").order_by("-pk").first() - self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250783333333", "twitter:jean"}) - self.assertEqual(set(jean.get_groups()), {group}) - self.assertEqual(jean.get_field_value(nickname), "Jado") - - # try to create with group from other org - self.assertPost( - endpoint_url, - self.editor, - {"name": "Jim", "groups": [other_org_group.uuid]}, - errors={"groups": f"No such object: {other_org_group.uuid}"}, - ) - - # try to create with invalid fields - response = self.assertPost( - endpoint_url, - self.editor, - { - "name": "Jim", - "language": "xyz", - "urns": ["1234556789"], - "groups": ["59686b4e-14bc-4160-9376-b649b218c806"], - "fields": {"hmmm": "X"}, - }, - errors={ - "language": "Not a valid ISO639-3 language code.", - "groups": "No such object: 59686b4e-14bc-4160-9376-b649b218c806", - "fields": "Invalid contact field key: hmmm", - ("urns", "0"): "Invalid URN: 1234556789. 
-            },
-        )
-
-        # update an existing contact by UUID but don't provide any fields
-        self.assertPost(endpoint_url + f"?uuid={jean.uuid}", self.editor, {})
-
-        # contact should be unchanged
-        jean.refresh_from_db()
-        self.assertEqual(jean.name, "Jean")
-        self.assertEqual(jean.language, "fra")
-        self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250783333333", "twitter:jean"})
-        self.assertEqual(set(jean.get_groups()), {group})
-        self.assertEqual(jean.get_field_value(nickname), "Jado")
-
-        # update by UUID and change all fields
-        self.assertPost(
-            endpoint_url + f"?uuid={jean.uuid}",
-            self.editor,
-            {
-                "name": "Jason Undead",
-                "language": "ita",
-                "urns": ["tel:+250784444444"],
-                "groups": [],
-                "fields": {"nickname": "Žan", "gender": "frog"},
-            },
-        )
-
-        jean.refresh_from_db()
-        self.assertEqual(jean.name, "Jason Undead")
-        self.assertEqual(jean.language, "ita")
-        self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250784444444"})
-        self.assertEqual(set(jean.get_groups()), set())
-        self.assertEqual(jean.get_field_value(nickname), "Žan")
-        self.assertEqual(jean.get_field_value(gender), "frog")
-
-        # change the language field
-        self.assertPost(
-            endpoint_url + f"?uuid={jean.uuid}",
-            self.editor,
-            {"name": "Jean II", "language": "eng", "urns": ["tel:+250784444444"], "groups": [], "fields": {}},
-        )
-
-        jean.refresh_from_db()
-        self.assertEqual(jean.name, "Jean II")
-        self.assertEqual(jean.language, "eng")
-        self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250784444444"})
-        self.assertEqual(set(jean.get_groups()), set())
-        self.assertEqual(jean.get_field_value(nickname), "Žan")
-
-        # update by uuid and remove all fields
-        self.assertPost(
-            endpoint_url + f"?uuid={jean.uuid}",
-            self.editor,
-            {
-                "name": "Jean II",
-                "language": "eng",
-                "urns": ["tel:+250784444444"],
-                "groups": [],
-                "fields": {"nickname": "", "gender": ""},
-            },
-        )
-
-        jean.refresh_from_db()
-        self.assertEqual(jean.get_field_value(nickname), None)
-        self.assertEqual(jean.get_field_value(gender), None)
-
-        # update by uuid and update/remove fields
-        self.assertPost(
-            endpoint_url + f"?uuid={jean.uuid}",
-            self.editor,
-            {
-                "name": "Jean II",
-                "language": "eng",
-                "urns": ["tel:+250784444444"],
-                "groups": [],
-                "fields": {"nickname": "Jado", "gender": ""},
-            },
-        )
-
-        jean.refresh_from_db()
-        self.assertEqual(jean.get_field_value(nickname), "Jado")
-        self.assertEqual(jean.get_field_value(gender), None)
-
-        # update by URN (which should be normalized)
-        self.assertPost(endpoint_url + f"?urn={quote_plus('tel:+250-78-4444444')}", self.editor, {"name": "Jean III"})
-
-        jean.refresh_from_db()
-        self.assertEqual(jean.name, "Jean III")
-
-        # try to specify URNs field whilst referencing by URN
-        self.assertPost(
-            endpoint_url + f"?urn={quote_plus('tel:+250-78-4444444')}",
-            self.editor,
-            {"urns": ["tel:+250785555555"]},
-            errors={"urns": "Field not allowed when using URN in URL"},
-        )
-
-        # if contact doesn't exist with URN, they're created
-        self.assertPost(
-            endpoint_url + f"?urn={quote_plus('tel:+250-78-5555555')}", self.editor, {"name": "Bobby"}, status=201
-        )
-
-        # URN should be normalized
-        bobby = Contact.objects.get(name="Bobby")
-        self.assertEqual(set(bobby.urns.values_list("identity", flat=True)), {"tel:+250785555555"})
-
-        # try to create a contact with a URN belonging to another contact
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {"name": "Robert", "urns": ["tel:+250-78-5555555"]},
errors={("urns", "0"): "URN is in use by another contact."}, - ) - - # try to update a contact with non-existent UUID - self.assertPost(endpoint_url + "?uuid=ad6acad9-959b-4d70-b144-5de2891e4d00", self.editor, {}, status=404) - - # try to update a contact in another org - self.assertPost(endpoint_url + f"?uuid={hans.uuid}", self.editor, {}, status=404) - - # try to add a contact to a dynamic group - dyn_group = self.create_group("Dynamic Group", query="name = Frank") - ContactGroup.objects.filter(id=dyn_group.id).update(status=ContactGroup.STATUS_READY) - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.editor, - {"groups": [dyn_group.uuid]}, - errors={"groups": "Contact group must not be query based: %s" % dyn_group.uuid}, - ) - - # try to give a contact more than 100 URNs - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.editor, - {"urns": ["twitter:bob%d" % u for u in range(101)]}, - errors={"urns": "Ensure this field has no more than 100 elements."}, - ) - - # try to give a contact more than 100 contact fields - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.editor, - {"fields": {"field_%d" % f: f for f in range(101)}}, - errors={"fields": "Ensure this field has no more than 100 elements."}, - ) - - # ok to give them 100 URNs - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.editor, - {"urns": ["twitter:bob%d" % u for u in range(100)]}, - ) - self.assertEqual(jean.urns.count(), 100) - - # try to move a blocked contact into a group - jean.block(self.user) - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.editor, - {"groups": [group.uuid]}, - errors={"groups": "Non-active contacts can't be added to groups"}, - ) - - # try to update a contact by both UUID and URN - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}&urn={quote_plus('tel:+250784444444')}", - self.editor, - {}, - errors={None: "URL can only contain one of the following parameters: urn, uuid"}, - ) - - # try an empty delete request - self.assertDelete( - endpoint_url, - self.editor, - errors={None: "URL must contain one of the following parameters: urn, uuid"}, - ) - - # delete a contact by UUID - self.assertDelete(endpoint_url + f"?uuid={jean.uuid}", self.editor, status=204) - - jean.refresh_from_db() - self.assertFalse(jean.is_active) - - self.assertPost( - endpoint_url + f"?uuid={jean.uuid}", - self.admin, - {}, - errors={"non_field_errors": "Deleted contacts can't be modified."}, - ) - - # create xavier - self.assertPost( - endpoint_url, self.admin, {"name": "Xavier", "urns": ["tel:+250-78-7777777", "twitter:XAVIER"]}, status=201 - ) - - xavier = Contact.objects.get(name="Xavier") - self.assertEqual(set(xavier.urns.values_list("identity", flat=True)), {"twitter:xavier", "tel:+250787777777"}) - - # updating fields by urn should keep all exiting urns - self.assertPost( - endpoint_url + f"?urn={quote_plus('tel:+250787777777')}", self.admin, {"fields": {"gender": "Male"}} - ) - - xavier.refresh_from_db() - self.assertEqual(set(xavier.urns.values_list("identity", flat=True)), {"twitter:xavier", "tel:+250787777777"}) - self.assertEqual(xavier.get_field_value(gender), "Male") - - # delete a contact by URN (which should be normalized) - self.assertDelete(endpoint_url + f"?urn={quote_plus('twitter:XAVIER')}", self.editor, status=204) - - xavier.refresh_from_db() - self.assertFalse(xavier.is_active) - - # try deleting a contact by a non-existent URN - self.assertDelete(endpoint_url + "?urn=twitter:billy", self.editor, status=404) - - # try to delete a 
-        self.assertDelete(endpoint_url + f"?uuid={hans.uuid}", self.editor, status=404)
-
-        # add some notes for frank
-        frank_url = endpoint_url + f"?uuid={self.frank.uuid}"
-        for i in range(1, 6):
-            self.assertPost(
-                frank_url,
-                self.admin,
-                {"note": f"Frank is a good guy ({i})"},
-            )
-
-        # four more notes by another user to make sure prefetch works
-        for i in range(6, 10):
-            self.assertPost(
-                frank_url,
-                self.editor,
-                {"note": f"Frank is an okay guy ({i})"},
-            )
-
-        self.frank.refresh_from_db()
-        response = self.assertGet(
-            frank_url, [self.editor], results=[self.frank], num_queries=NUM_BASE_SESSION_QUERIES + 7
-        )
-
-        # our oldest note should be number 5
-        self.assertEqual(
-            "Frank is a good guy (5)",
-            response.json()["results"][0]["notes"][0]["text"],
-        )
-
-        # our newest note should be number 9
-        self.assertEqual(
-            "Frank is an okay guy (9)",
-            response.json()["results"][0]["notes"][-1]["text"],
-        )
-
-    @mock_mailroom
-    def test_contacts_as_agent(self, mr_mocks):
-        endpoint_url = reverse("api.v2.contacts") + ".json"
-
-        self.create_field("gender", "Gender", ContactField.TYPE_TEXT, agent_access=ContactField.ACCESS_NONE)
-        self.create_field("age", "Age", ContactField.TYPE_NUMBER, agent_access=ContactField.ACCESS_VIEW)
-        self.create_field("height", "Height", ContactField.TYPE_NUMBER, agent_access=ContactField.ACCESS_EDIT)
-
-        contact = self.create_contact(
-            "Bob", urns=["telegram:12345"], fields={"gender": "M", "age": "40", "height": "180"}
-        )
-
-        # fetching a contact returns only the fields that agents can access
-        self.assertGet(
-            endpoint_url + f"?uuid={contact.uuid}",
-            [self.agent],
-            results=[
-                {
-                    "uuid": str(contact.uuid),
-                    "name": "Bob",
-                    "status": "active",
-                    "language": None,
-                    "urns": ["telegram:12345"],
-                    "groups": [],
-                    "notes": [],
-                    "fields": {"age": "40", "height": "180"},
-                    "flow": None,
-                    "created_on": format_datetime(contact.created_on),
-                    "modified_on": format_datetime(contact.modified_on),
-                    "last_seen_on": None,
-                    "blocked": False,
-                    "stopped": False,
-                }
-            ],
-        )
-
-        # can't edit the field that we don't have any access to
-        self.assertPost(
-            endpoint_url + f"?uuid={contact.uuid}",
-            self.agent,
-            {"fields": {"gender": "M"}},
-            errors={"fields": "Invalid contact field key: gender"},
-        )
-
-        # nor the field that we have view access to
-        self.assertPost(
-            endpoint_url + f"?uuid={contact.uuid}",
-            self.agent,
-            {"fields": {"age": "30"}},
-            errors={"fields": "Editing of 'age' values disallowed for current user."},
-        )
-
-        # but can edit the field we have edit access for
-        self.assertPost(
-            endpoint_url + f"?uuid={contact.uuid}",
-            self.agent,
-            {"fields": {"height": "160"}},
-        )
-
-    def test_contacts_prevent_null_chars(self):
-        endpoint_url = reverse("api.v2.contacts") + ".json"
-
-        self.create_field("string_field", "String")
-        self.create_field("number_field", "Number", value_type=ContactField.TYPE_NUMBER)
-
-        # test create with null chars \u0000
-        self.login(self.admin)
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "name": "Jean",
-                "urns": ["tel:+250783333334"],
-                "fields": {"string_field": "crayons on the wall \u0000, pudding on the wall \x00, yeah \0"},
-            },
-            errors={("fields", "string_field"): "Null characters are not allowed."},
-        )
-
-    @mock_mailroom
-    def test_contacts_update_datetime_field(self, mr_mocks):
-        endpoint_url = reverse("api.v2.contacts") + ".json"
-
-        self.create_field("activated_at", "Tag activation", ContactField.TYPE_DATETIME)
-
-        # update contact with valid date format for the org - DD-MM-YYYY
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "31-12-2017"}}
-        )
-        self.assertIsNotNone(response.json()["fields"]["activated_at"])
-
-        # update contact with valid ISO8601 timestamp value with timezone
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11T11:12:13Z"}}
-        )
-        self.assertEqual(response.json()["fields"]["activated_at"], "2017-11-11T13:12:13+02:00")
-
-        # update contact with valid ISO8601 timestamp value, 'T' replaced with space
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11 11:12:13Z"}}
-        )
-        self.assertEqual(response.json()["fields"]["activated_at"], "2017-11-11T13:12:13+02:00")
-
-        # update contact with invalid ISO8601 timestamp value without timezone
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11T11:12:13"}}
-        )
-        self.assertIsNone(response.json()["fields"]["activated_at"])
-
-        # update contact with invalid date format for the org - MM-DD-YYYY
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "12-31-2017"}}
-        )
-        self.assertIsNone(response.json()["fields"]["activated_at"])
-
-        # update contact with invalid timestamp value
-        response = self.assertPost(
-            endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "el123a41"}}
-        )
-        self.assertIsNone(response.json()["fields"]["activated_at"])
-
-    @mock_mailroom
-    def test_contacts_anonymous_org(self, mr_mocks):
-        endpoint_url = reverse("api.v2.contacts") + ".json"
-
-        group = ContactGroup.get_or_create(self.org, self.admin, "Customers")
-
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "name": "Jean",
-                "language": "fra",
-                "urns": ["tel:+250783333333", "twitter:JEAN"],
-                "groups": [group.uuid],
-                "fields": {},
-            },
-            status=201,
-        )
-
-        jean = Contact.objects.filter(name="Jean", language="fra").get()
-
-        with self.anonymous(self.org):
-            # can't update via URN
-            self.assertPost(
-                endpoint_url + "?urn=tel:+250785555555",
-                self.editor,
-                {},
-                errors={None: "URN lookups not allowed for anonymous organizations"},
-                status=400,
-            )
-
-            # can't update contact URNs
-            self.assertPost(
-                endpoint_url + f"?uuid={jean.uuid}",
-                self.editor,
-                {"urns": ["tel:+250786666666"]},
-                errors={"urns": "Updating URNs not allowed for anonymous organizations"},
-                status=400,
-            )
-
-            # output shouldn't include URNs
-            response = self.assertGet(endpoint_url + f"?uuid={jean.uuid}", [self.admin], results=[jean])
-            self.assertEqual(response.json()["results"][0]["urns"], ["tel:********", "twitter:********"])
-
-            # but can create with URNs
-            response = self.assertPost(
-                endpoint_url,
-                self.admin,
-                {"name": "Xavier", "urns": ["tel:+250-78-7777777", "twitter:XAVIER"]},
-                status=201,
-            )
-
-            # TODO should UUID be masked in response??
-            xavier = Contact.objects.get(name="Xavier")
-            self.assertEqual(
-                set(xavier.urns.values_list("identity", flat=True)), {"tel:+250787777777", "twitter:xavier"}
-            )
-
-            # can't filter by URN
-            self.assertGet(
-                endpoint_url + f"?urn={quote_plus('tel:+250-78-8000004')}",
-                [self.admin],
-                errors={None: "URN lookups not allowed for anonymous organizations"},
-            )
-
-    @mock_mailroom
-    def test_contact_actions(self, mr_mocks):
-        endpoint_url = reverse("api.v2.contact_actions") + ".json"
-
-        self.assertGetNotAllowed(endpoint_url)
-        self.assertPostNotPermitted(endpoint_url, [None, self.user])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        for contact in Contact.objects.all():
-            contact.release(self.admin)
-            contact.delete()
-
-        # create some contacts to act on
-        contact1 = self.create_contact("Ann", phone="+250788000001")
-        contact2 = self.create_contact("Bob", phone="+250788000002")
-        contact3 = self.create_contact("Cat", phone="+250788000003")
-        contact4 = self.create_contact("Don", phone="+250788000004")  # a blocked contact
-        contact5 = self.create_contact("Eve", phone="+250788000005")  # a deleted contact
-        contact4.block(self.user)
-        contact5.release(self.user)
-
-        group = self.create_group("Testers")
-        self.create_field("isdeveloper", "Is developer")
-        self.create_group("Developers", query="isdeveloper = YES")
-        other_org_group = self.create_group("Testers", org=self.org2)
-
-        # create some waiting runs for some of the contacts
-        flow = self.create_flow("Favorites")
-        MockSessionWriter(contact1, flow).wait().save()
-        MockSessionWriter(contact2, flow).wait().save()
-        MockSessionWriter(contact3, flow).wait().save()
-
-        self.create_incoming_msg(contact1, "Hello")
-        self.create_incoming_msg(contact2, "Hello")
-        self.create_incoming_msg(contact3, "Hello")
-        self.create_incoming_msg(contact4, "Hello")
-
-        # try adding more contacts to group than this endpoint is allowed to operate on at one time
-        self.assertPost(
-            endpoint_url,
-            self.agent,
-            {"contacts": [str(x) for x in range(101)], "action": "add", "group": "Testers"},
-            errors={"contacts": "Ensure this field has no more than 100 elements."},
-        )
-
-        # try adding all contacts to a group by its name, including a deleted contact
-        self.assertPost(
-            endpoint_url,
-            self.editor,
-            {
-                "contacts": [contact1.uuid, "tel:+250788000002", contact3.uuid, contact4.uuid, contact5.uuid],
-                "action": "add",
-                "group": "Testers",
-            },
-            errors={"contacts": "No such object: %s" % contact5.uuid},
-        )
-
-        # try adding a blocked contact to a group
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {
-                "contacts": [contact1.uuid, contact2.uuid, contact3.uuid, contact4.uuid],
-                "action": "add",
-                "group": "Testers",
-            },
-            errors={"non_field_errors": "Non-active contacts cannot be added to groups: %s" % contact4.uuid},
-        )
-
-        # add valid contacts to the group by name
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid, "tel:+250788000002"], "action": "add", "group": "Testers"},
-            status=204,
-        )
-        self.assertEqual(set(group.contacts.all()), {contact1, contact2})
-
-        # try to add to a non-existent group
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid], "action": "add", "group": "Spammers"},
-            errors={"group": "No such object: Spammers"},
-        )
-
-        # try to add to a dynamic group
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid], "action": "add", "group": "Developers"},
-            errors={"group": "Contact group must not be query based: Developers"},
-        )
-
-        # add contact 3 to a group by its UUID
-        self.assertPost(
-            endpoint_url, self.admin, {"contacts": [contact3.uuid], "action": "add", "group": group.uuid}, status=204
-        )
-        self.assertEqual(set(group.contacts.all()), {contact1, contact2, contact3})
-
-        # try adding with invalid group UUID
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact3.uuid], "action": "add", "group": "15611256-95b5-46d5-b857-abafe0d32fe9"},
-            errors={"group": "No such object: 15611256-95b5-46d5-b857-abafe0d32fe9"},
-        )
-
-        # try to add to a group in another org
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact3.uuid], "action": "add", "group": other_org_group.uuid},
-            errors={"group": f"No such object: {other_org_group.uuid}"},
-        )
-
-        # remove contact 2 from group by its name (which is case-insensitive)
-        self.assertPost(
-            endpoint_url, self.admin, {"contacts": [contact2.uuid], "action": "remove", "group": "testers"}, status=204
-        )
-        self.assertEqual(set(group.contacts.all()), {contact1, contact3})
-
-        # and remove contact 3 from group by its UUID
-        self.assertPost(
-            endpoint_url, self.admin, {"contacts": [contact3.uuid], "action": "remove", "group": group.uuid}, status=204
-        )
-        self.assertEqual(set(group.contacts.all()), {contact1})
-
-        # try to add to group without specifying a group
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid], "action": "add"},
-            errors={"non_field_errors": 'For action "add" you should also specify a group'},
-        )
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid], "action": "add", "group": ""},
-            errors={"group": "This field may not be null."},
-        )
-
-        # block all contacts
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid, contact2.uuid, contact3.uuid, contact4.uuid], "action": "block"},
-            status=204,
-        )
-        self.assertEqual(
-            set(Contact.objects.filter(status=Contact.STATUS_BLOCKED)), {contact1, contact2, contact3, contact4}
-        )
-
-        # unblock contact 1
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid], "action": "unblock"},
-            status=204,
-        )
-        self.assertEqual(set(self.org.contacts.filter(status=Contact.STATUS_ACTIVE)), {contact1, contact5})
-        self.assertEqual(set(self.org.contacts.filter(status=Contact.STATUS_BLOCKED)), {contact2, contact3, contact4})
-
-        # interrupt any active runs of contacts 1 and 2
-        with patch("temba.mailroom.queue_interrupt") as mock_queue_interrupt:
-            self.assertPost(
-                endpoint_url,
-                self.admin,
-                {"contacts": [contact1.uuid, contact2.uuid], "action": "interrupt"},
-                status=204,
-            )
-
-            mock_queue_interrupt.assert_called_once_with(self.org, contacts=[contact1, contact2])
-
-        # archive all messages for contacts 1 and 2
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid, contact2.uuid], "action": "archive_messages"},
-            status=204,
-        )
-        self.assertFalse(Msg.objects.filter(contact__in=[contact1, contact2], direction="I", visibility="V").exists())
-        self.assertTrue(Msg.objects.filter(contact=contact3, direction="I", visibility="V").exists())
-
-        # delete contacts 1 and 2
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact1.uuid, contact2.uuid], "action": "delete"},
-            status=204,
-        )
-        self.assertEqual(set(self.org.contacts.filter(is_active=False)), {contact1, contact2, contact5})
-        self.assertEqual(set(self.org.contacts.filter(is_active=True)), {contact3, contact4})
-        self.assertFalse(Msg.objects.filter(contact__in=[contact1, contact2]).exclude(visibility="D").exists())
-        self.assertTrue(Msg.objects.filter(contact=contact3).exclude(visibility="D").exists())
-
-        # try to provide a group for a non-group action
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact3.uuid], "action": "block", "group": "Testers"},
-            errors={"non_field_errors": 'For action "block" you should not specify a group'},
-        )
-
-        # trying to act on zero contacts is an error
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [], "action": "block"},
-            errors={"contacts": "Contacts can't be empty."},
-        )
-
-        # try to invoke an invalid action
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contacts": [contact3.uuid], "action": "like"},
-            errors={"action": '"like" is not a valid choice.'},
-        )
-
-    def test_definitions(self):
-        endpoint_url = reverse("api.v2.definitions") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        self.import_file("subflow")
-        flow = Flow.objects.get(name="Parent Flow")
-
-        # with all flow dependencies we should get the child flow
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}",
-            [self.editor],
-            raw=lambda j: {f["name"] for f in j["flows"]} == {"Child Flow", "Parent Flow"},
-        )
-
-        # export just the parent flow
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}&dependencies=none",
-            [self.editor],
-            raw=lambda j: {f["name"] for f in j["flows"]} == {"Parent Flow"},
-        )
-
-        # import the clinic app which has campaigns
-        self.import_file("the_clinic")
-
-        # our catchall flow, all alone
-        flow = Flow.objects.get(name="Catch All")
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}&dependencies=none",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 0,
-        )
-
-        # with its trigger dependency
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 1,
-        )
-
-        # our registration flow, all alone
-        flow = Flow.objects.get(name="Register Patient")
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}&dependencies=none",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 0,
-        )
-
-        # touches a lot of stuff
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 6 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 2,
-        )
-
-        # ignore campaign dependencies
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}&dependencies=flows",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 2 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 1,
-        )
-
-        # add our missed call flow
-        missed_call = Flow.objects.get(name="Missed Call")
-        self.assertGet(
-            endpoint_url + f"?flow={flow.uuid}&flow={missed_call.uuid}&dependencies=all",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 7 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 3,
-        )
-
-        campaign = Campaign.objects.get(name="Appointment Schedule")
-        self.assertGet(
-            endpoint_url + f"?campaign={campaign.uuid}&dependencies=none",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 0 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 0,
-        )
-
-        self.assertGet(
-            endpoint_url + f"?campaign={campaign.uuid}",
-            [self.editor],
-            raw=lambda j: len(j["flows"]) == 6 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 2,
-        )
-
-        # test an invalid value for dependencies
-        self.assertGet(
endpoint_url + f"?flow={flow.uuid}&dependencies=xx", - [self.editor], - errors={None: "dependencies must be one of none, flows, all"}, - ) - - # test that flows are migrated - self.import_file("favorites_v13") - - flow = Flow.objects.get(name="Favorites") - self.assertGet( - endpoint_url + f"?flow={flow.uuid}", - [self.editor], - raw=lambda j: len(j["flows"]) == 1 and j["flows"][0]["spec_version"] == Flow.CURRENT_SPEC_VERSION, - ) - - @override_settings(ORG_LIMIT_DEFAULTS={"fields": 10}) - def test_fields(self): - endpoint_url = reverse("api.v2.fields") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotPermitted(endpoint_url, [None, self.user]) - self.assertDeleteNotAllowed(endpoint_url) - - nick_name = self.create_field("nick_name", "Nick Name", agent_access=ContactField.ACCESS_EDIT) - registered = self.create_field("registered", "Registered On", value_type=ContactField.TYPE_DATETIME) - self.create_field("not_ours", "Something Else", org=self.org2) - - # add our date field to a campaign event - campaign = Campaign.create(self.org, self.admin, "Reminders", self.create_group("Farmers")) - CampaignEvent.create_flow_event( - self.org, self.admin, campaign, registered, offset=1, unit="W", flow=self.create_flow("Flow") - ) - - deleted = self.create_field("deleted", "Deleted") - deleted.release(self.admin) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - { - "key": "registered", - "name": "Registered On", - "type": "datetime", - "featured": False, - "priority": 0, - "usages": {"campaign_events": 1, "flows": 0, "groups": 0}, - "agent_access": "view", - "label": "Registered On", - "value_type": "datetime", - }, - { - "key": "nick_name", - "name": "Nick Name", - "type": "text", - "featured": False, - "priority": 0, - "usages": {"campaign_events": 0, "flows": 0, "groups": 0}, - "agent_access": "edit", - "label": "Nick Name", - "value_type": "text", - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 1, - ) - - # filter by key - self.assertGet(endpoint_url + "?key=nick_name", [self.editor], results=[nick_name]) - - # try to create empty field - self.assertPost(endpoint_url, self.admin, {}, errors={"non_field_errors": "Field 'name' is required."}) - - # try to create field without type - self.assertPost( - endpoint_url, self.admin, {"name": "goats"}, errors={"non_field_errors": "Field 'type' is required."} - ) - - # try again with some invalid values - self.assertPost( - endpoint_url, - self.admin, - {"name": "!@#$%", "type": "video"}, - errors={"name": "Can only contain letters, numbers and hypens.", "type": '"video" is not a valid choice.'}, - ) - - # try again with some invalid values using deprecated field names - self.assertPost( - endpoint_url, - self.admin, - {"label": "!@#$%", "value_type": "video"}, - errors={ - "label": "Can only contain letters, numbers and hypens.", - "value_type": '"video" is not a valid choice.', - }, - ) - - # try again with a label that would generate an invalid key - self.assertPost( - endpoint_url, - self.admin, - {"name": "HAS", "type": "text"}, - errors={"name": 'Generated key "has" is invalid or a reserved name.'}, - ) - - # try again with a label that's already taken - self.assertPost( - endpoint_url, - self.admin, - {"label": "nick name", "value_type": "text"}, - errors={"label": "This field must be unique."}, - ) - - # create a new field - self.assertPost(endpoint_url, self.editor, {"name": "Age", "type": "number"}, status=201) - - age = ContactField.objects.get( - org=self.org, 
name="Age", value_type="N", is_proxy=False, is_system=False, is_active=True - ) - - # update a field by its key - self.assertPost(endpoint_url + "?key=age", self.admin, {"name": "Real Age", "type": "datetime"}) - age.refresh_from_db() - self.assertEqual(age.name, "Real Age") - self.assertEqual(age.value_type, "D") - - # try to update with key of deleted field - self.assertPost(endpoint_url + "?key=deleted", self.admin, {"name": "Something", "type": "text"}, status=404) - - # try to update with non-existent key - self.assertPost(endpoint_url + "?key=not_ours", self.admin, {"name": "Something", "type": "text"}, status=404) - - # try to change type of date field used by campaign event - self.assertPost( - endpoint_url + "?key=registered", - self.admin, - {"name": "Registered", "type": "text"}, - errors={"type": "Can't change type of date field being used by campaign events."}, - ) - - CampaignEvent.objects.all().delete() - ContactField.objects.filter(is_system=False).delete() - - for i in range(10): - self.create_field("field%d" % i, "Field%d" % i) - - self.assertPost( - endpoint_url, - self.admin, - {"label": "Age", "value_type": "numeric"}, - errors={None: "Cannot create object because workspace has reached limit of 10."}, - status=409, - ) - - def test_flows(self): - endpoint_url = reverse("api.v2.flows") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotAllowed(endpoint_url) - self.assertDeleteNotAllowed(endpoint_url) - - survey = self.get_flow("media_survey") - color = self.get_flow("color") - archived = self.get_flow("favorites") - archived.archive(self.admin) - - # add a campaign message flow that should be filtered out - Flow.create_single_message(self.org, self.admin, dict(eng="Hello world"), "eng") - - # add a flow label - reporting = FlowLabel.create(self.org, self.admin, "Reporting") - color.labels.add(reporting) - - # make it look like joe completed the color flow - FlowRun.objects.create( - org=self.org, flow=color, contact=self.joe, status=FlowRun.STATUS_COMPLETED, exited_on=timezone.now() - ) - - # flow belong to other org - other_org = self.create_flow("Other", org=self.org2) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - { - "uuid": archived.uuid, - "name": "Favorites", - "type": "message", - "archived": True, - "labels": [], - "expires": 720, - "runs": {"active": 0, "waiting": 0, "completed": 0, "interrupted": 0, "expired": 0, "failed": 0}, - "results": [ - { - "key": "color", - "name": "Color", - "categories": ["Red", "Green", "Blue", "Cyan", "Other"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "beer", - "name": "Beer", - "categories": ["Mutzig", "Primus", "Turbo King", "Skol", "Other"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "name", - "name": "Name", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - ], - "parent_refs": [], - "created_on": format_datetime(archived.created_on), - "modified_on": format_datetime(archived.modified_on), - }, - { - "uuid": color.uuid, - "name": "Color Flow", - "type": "message", - "archived": False, - "labels": [{"uuid": str(reporting.uuid), "name": "Reporting"}], - "expires": 10080, - "runs": {"active": 0, "waiting": 0, "completed": 1, "interrupted": 0, "expired": 0, "failed": 0}, - "results": [ - { - "key": "color", - "name": "color", - "categories": ["Orange", "Blue", "Other", "Nothing"], - "node_uuids": [matchers.UUID4String()], - } - ], - "parent_refs": [], - "created_on": 
format_datetime(color.created_on), - "modified_on": format_datetime(color.modified_on), - }, - { - "uuid": survey.uuid, - "name": "Media Survey", - "type": "survey", - "archived": False, - "labels": [], - "expires": 10080, - "runs": {"active": 0, "waiting": 0, "completed": 0, "interrupted": 0, "expired": 0, "failed": 0}, - "results": [ - { - "key": "name", - "name": "Name", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "photo", - "name": "Photo", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "location", - "name": "Location", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "video", - "name": "Video", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - ], - "parent_refs": [], - "created_on": format_datetime(survey.created_on), - "modified_on": format_datetime(survey.modified_on), - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 5, - ) - - self.assertGet(endpoint_url, [self.admin2], results=[other_org]) - - # filter by key - self.assertGet(endpoint_url + f"?uuid={color.uuid}", [self.editor], results=[color]) - - # filter by type - self.assertGet(endpoint_url + "?type=message", [self.editor], results=[archived, color]) - self.assertGet(endpoint_url + "?type=survey", [self.editor], results=[survey]) - - # filter by archived - self.assertGet(endpoint_url + "?archived=1", [self.editor], results=[archived]) - self.assertGet(endpoint_url + "?archived=0", [self.editor], results=[color, survey]) - self.assertGet(endpoint_url + "?archived=false", [self.editor], results=[color, survey]) - - # filter by before / after - self.assertGet( - endpoint_url + f"?before={format_datetime(color.modified_on)}", [self.editor], results=[color, survey] - ) - self.assertGet( - endpoint_url + f"?after={format_datetime(color.modified_on)}", [self.editor], results=[archived, color] - ) - - # inactive flows are never returned - archived.is_active = False - archived.save() - - self.assertGet(endpoint_url, [self.editor], results=[color, survey]) - - @patch("temba.flows.models.FlowStart.async_start") - def test_flow_starts(self, mock_async_start): - endpoint_url = reverse("api.v2.flow_starts") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user]) - self.assertDeleteNotAllowed(endpoint_url) - - flow = self.get_flow("favorites_v13") - - # try to create an empty flow start - self.assertPost(endpoint_url, self.editor, {}, errors={"flow": "This field is required."}) - - # start a flow with the minimum required parameters - response = self.assertPost( - endpoint_url, self.editor, {"flow": flow.uuid, "contacts": [self.joe.uuid]}, status=201 - ) - - start1 = flow.starts.get(id=response.json()["id"]) - self.assertEqual(start1.flow, flow) - self.assertEqual(set(start1.contacts.all()), {self.joe}) - self.assertEqual(set(start1.groups.all()), set()) - self.assertEqual(start1.exclusions, {"in_a_flow": False, "started_previously": False}) - self.assertEqual(start1.params, {}) - - # check we tried to start the new flow start - mock_async_start.assert_called_once() - mock_async_start.reset_mock() - - # start a flow with all parameters - hans_group = self.create_group("hans", contacts=[self.hans]) - response = self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": 
flow.uuid, - "restart_participants": False, - "extra": {"first_name": "Ryan", "last_name": "Lewis"}, - }, - status=201, - ) - - # assert our new start - start2 = flow.starts.get(id=response.json()["id"]) - self.assertEqual(start2.flow, flow) - self.assertEqual(start2.start_type, FlowStart.TYPE_API) - self.assertEqual(["tel:+12067791212"], start2.urns) - self.assertEqual({self.joe}, set(start2.contacts.all())) - self.assertEqual({hans_group}, set(start2.groups.all())) - self.assertEqual(start2.exclusions, {"in_a_flow": False, "started_previously": True}) - self.assertEqual(start2.params, {"first_name": "Ryan", "last_name": "Lewis"}) - - # check we tried to start the new flow start - mock_async_start.assert_called_once() - mock_async_start.reset_mock() - - response = self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": False, - "extra": {"first_name": "Ryan", "last_name": "Lewis"}, - "params": {"first_name": "Bob", "last_name": "Marley"}, - }, - status=201, - ) - - # assert our new start - start3 = flow.starts.get(id=response.json()["id"]) - self.assertEqual(start3.flow, flow) - self.assertEqual(["tel:+12067791212"], start3.urns) - self.assertEqual({self.joe}, set(start3.contacts.all())) - self.assertEqual({hans_group}, set(start3.groups.all())) - self.assertEqual(start3.exclusions, {"in_a_flow": False, "started_previously": True}) - self.assertEqual(start3.params, {"first_name": "Bob", "last_name": "Marley"}) - - # check we tried to start the new flow start - mock_async_start.assert_called_once() - mock_async_start.reset_mock() - - # calls from Zapier have user-agent set to Zapier - response = self.assertPost( - endpoint_url, - self.admin, - {"contacts": [self.joe.uuid], "flow": flow.uuid}, - HTTP_USER_AGENT="Zapier", - status=201, - ) - - # assert our new start has start_type of Zapier - start4 = flow.starts.get(id=response.json()["id"]) - self.assertEqual(FlowStart.TYPE_API_ZAPIER, start4.start_type) - - # try to start a flow with no contact/group/URN - self.assertPost( - endpoint_url, - self.admin, - {"flow": flow.uuid, "restart_participants": True}, - errors={"non_field_errors": "Must specify at least one group, contact or URN"}, - ) - - # should raise validation error for invalid JSON in extra - self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": False, - "extra": "YES", - }, - errors={"extra": "Must be a valid JSON object"}, - ) - - # a list is valid JSON, but extra has to be a dict - self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": False, - "extra": [1], - }, - errors={"extra": "Must be a valid JSON object"}, - ) - - self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": False, - "params": "YES", - }, - errors={"params": "Must be a valid JSON object"}, - ) - - # a list is valid JSON, but extra has to be a dict - self.assertPost( - endpoint_url, - self.admin, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": False, - "params": [1], - }, - 
errors={"params": "Must be a valid JSON object"}, - ) - - # invalid URN - self.assertPost( - endpoint_url, - self.admin, - {"flow": flow.uuid, "urns": ["foo:bar"], "contacts": [self.joe.uuid]}, - errors={("urns", "0"): "Invalid URN: foo:bar. Ensure phone numbers contain country codes."}, - ) - - # invalid contact uuid - self.assertPost( - endpoint_url, - self.admin, - {"flow": flow.uuid, "urns": ["tel:+12067791212"], "contacts": ["abcde"]}, - errors={"contacts": "No such object: abcde"}, - ) - - # invalid group uuid - self.assertPost( - endpoint_url, - self.admin, - {"flow": flow.uuid, "urns": ["tel:+12067791212"], "groups": ["abcde"]}, - errors={"groups": "No such object: abcde"}, - ) - - # invalid flow uuid - self.assertPost( - endpoint_url, - self.admin, - { - "flow": "abcde", - "urns": ["tel:+12067791212"], - }, - errors={"flow": "No such object: abcde"}, - ) - - # too many groups - group_uuids = [] - for g in range(101): - group_uuids.append(self.create_group("Group %d" % g).uuid) - - self.assertPost( - endpoint_url, - self.admin, - {"flow": flow.uuid, "groups": group_uuids}, - errors={"groups": "Ensure this field has no more than 100 elements."}, - ) - - # check fetching with no filtering - response = self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[start4, start3, start2, start1], - num_queries=NUM_BASE_SESSION_QUERIES + 4, - ) - self.assertEqual( - response.json()["results"][1], - { - "id": start3.id, - "uuid": str(start3.uuid), - "flow": {"uuid": flow.uuid, "name": "Favorites"}, - "contacts": [{"uuid": self.joe.uuid, "name": "Joe Blow"}], - "groups": [{"uuid": hans_group.uuid, "name": "hans"}], - "restart_participants": False, - "exclude_active": False, - "status": "pending", - "extra": {"first_name": "Bob", "last_name": "Marley"}, - "params": {"first_name": "Bob", "last_name": "Marley"}, - "created_on": format_datetime(start3.created_on), - "modified_on": format_datetime(start3.modified_on), - }, - ) - - # check filtering by UUID - self.assertGet(endpoint_url + f"?uuid={start2.uuid}", [self.admin], results=[start2]) - - # check filtering by in invalid UUID - self.assertGet(endpoint_url + "?uuid=xyz", [self.editor], errors={None: "Value for uuid must be a valid UUID"}) - - # check filtering by id (deprecated) - response = self.assertGet(endpoint_url + f"?id={start2.id}", [self.editor], results=[start2]) - - response = self.assertPost( - endpoint_url, - self.editor, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": True, - "exclude_active": False, - "extra": {"first_name": "Ryan", "last_name": "Lewis"}, - "params": {"first_name": "Bob", "last_name": "Marley"}, - }, - status=201, - ) - - start4 = flow.starts.get(id=response.json()["id"]) - self.assertEqual({"started_previously": False, "in_a_flow": False}, start4.exclusions) - - response = self.assertPost( - endpoint_url, - self.editor, - { - "urns": ["tel:+12067791212"], - "contacts": [self.joe.uuid], - "groups": [hans_group.uuid], - "flow": flow.uuid, - "restart_participants": True, - "exclude_active": True, - "extra": {"first_name": "Ryan", "last_name": "Lewis"}, - "params": {"first_name": "Bob", "last_name": "Marley"}, - }, - status=201, - ) - - start5 = flow.starts.get(id=response.json()["id"]) - self.assertEqual({"started_previously": False, "in_a_flow": True}, start5.exclusions) - - @override_settings(ORG_LIMIT_DEFAULTS={"globals": 3}) - def test_globals(self): - endpoint_url = reverse("api.v2.globals") + 
".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) - self.assertDeleteNotAllowed(endpoint_url) - - # create some globals - deleted = Global.get_or_create(self.org, self.admin, "org_name", "Org Name", "Acme Ltd") - deleted.release(self.admin) - - global1 = Global.get_or_create(self.org, self.admin, "org_name", "Org Name", "Acme Ltd") - global2 = Global.get_or_create(self.org, self.admin, "access_token", "Access Token", "23464373") - - # on another org - global3 = Global.get_or_create(self.org2, self.admin, "thingy", "Thingy", "xyz") - - # check no filtering - response = self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - { - "key": "access_token", - "name": "Access Token", - "value": "23464373", - "modified_on": format_datetime(global2.modified_on), - }, - { - "key": "org_name", - "name": "Org Name", - "value": "Acme Ltd", - "modified_on": format_datetime(global1.modified_on), - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 1, - ) - - # check no filtering with token auth - response = self.assertGet( - endpoint_url, - [self.editor, self.admin], - results=[global2, global1], - by_token=True, - num_queries=NUM_BASE_TOKEN_QUERIES + 1, - ) - - self.assertGet(endpoint_url, [self.admin2], results=[global3]) - - # filter by key - self.assertGet(endpoint_url + "?key=org_name", [self.editor], results=[global1]) - - # filter by before / after - self.assertGet( - endpoint_url + f"?before={format_datetime(global1.modified_on)}", [self.editor], results=[global1] - ) - self.assertGet( - endpoint_url + f"?after={format_datetime(global1.modified_on)}", [self.editor], results=[global2, global1] - ) - - # lets change a global - self.assertPost(endpoint_url + "?key=org_name", self.admin, {"value": "Acme LLC"}) - global1.refresh_from_db() - self.assertEqual(global1.value, "Acme LLC") - - # try to create a global with no name - response = self.assertPost( - endpoint_url, - self.admin, - {"value": "yes"}, - errors={"non_field_errors": "Name is required when creating new global."}, - ) - - # try to create a global with invalid name - response = self.assertPost( - endpoint_url, self.admin, {"name": "!!!#$%^"}, errors={"name": "Name contains illegal characters."} - ) - - # try to create a global with name that creates an invalid key - response = self.assertPost( - endpoint_url, - self.admin, - {"name": "2cool key", "value": "23464373"}, - errors={"name": "Name creates Key that is invalid"}, - ) - - # try to create a global with name that's too long - response = self.assertPost( - endpoint_url, - self.admin, - {"name": "x" * 37}, - errors={"name": "Ensure this field has no more than 36 characters."}, - ) - - # lets create a new global - response = self.assertPost(endpoint_url, self.admin, {"name": "New Global", "value": "23464373"}, status=201) - global3 = Global.objects.get(key="new_global") - self.assertEqual( - response.json(), - { - "key": "new_global", - "name": "New Global", - "value": "23464373", - "modified_on": format_datetime(global3.modified_on), - }, - ) - - # try again now that we've hit the mocked limit of globals per org - self.assertPost( - endpoint_url, - self.admin, - {"name": "Website URL", "value": "http://example.com"}, - errors={None: "Cannot create object because workspace has reached limit of 3."}, - status=409, - ) - - @override_settings(ORG_LIMIT_DEFAULTS={"groups": 10}) - @mock_mailroom - def test_groups(self, mr_mocks): - endpoint_url = reverse("api.v2.groups") + 
".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) - self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) - - self.create_field("isdeveloper", "Is developer") - open_tickets = self.org.groups.get(name="Open Tickets") - customers = self.create_group("Customers", [self.frank]) - developers = self.create_group("Developers", query='isdeveloper = "YES"') - ContactGroup.objects.filter(id=developers.id).update(status=ContactGroup.STATUS_READY) - - dynamic = self.create_group("Big Group", query='isdeveloper = "NO"') - ContactGroup.objects.filter(id=dynamic.id).update(status=ContactGroup.STATUS_EVALUATING) - - # an initializing group - ContactGroup.create_manual(self.org, self.admin, "Initializing", status=ContactGroup.STATUS_INITIALIZING) - - # group belong to other org - spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - { - "uuid": dynamic.uuid, - "name": "Big Group", - "query": 'isdeveloper = "NO"', - "status": "evaluating", - "system": False, - "count": 0, - }, - { - "uuid": developers.uuid, - "name": "Developers", - "query": 'isdeveloper = "YES"', - "status": "ready", - "system": False, - "count": 0, - }, - { - "uuid": customers.uuid, - "name": "Customers", - "query": None, - "status": "ready", - "system": False, - "count": 1, - }, - { - "uuid": open_tickets.uuid, - "name": "Open Tickets", - "query": "tickets > 0", - "status": "ready", - "system": True, - "count": 0, - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 2, - ) - - # filter by UUID - self.assertGet(endpoint_url + f"?uuid={customers.uuid}", [self.editor], results=[customers]) - - # filter by name - self.assertGet(endpoint_url + "?name=developers", [self.editor], results=[developers]) - - # try to filter by both - self.assertGet( - endpoint_url + f"?uuid={customers.uuid}&name=developers", - [self.editor], - errors={None: "You may only specify one of the uuid, name parameters"}, - ) - - # try to create empty group - self.assertPost(endpoint_url, self.admin, {}, errors={"name": "This field is required."}) - - # create new group - response = self.assertPost(endpoint_url, self.admin, {"name": "Reporters"}, status=201) - - reporters = ContactGroup.objects.get(name="Reporters") - self.assertEqual( - response.json(), - { - "uuid": reporters.uuid, - "name": "Reporters", - "query": None, - "status": "ready", - "system": False, - "count": 0, - }, - ) - - # try to create another group with same name - self.assertPost(endpoint_url, self.admin, {"name": "reporters"}, errors={"name": "This field must be unique."}) - - # try to create another group with same name as a system group.. 
- self.assertPost(endpoint_url, self.admin, {"name": "blocked"}, errors={"name": "This field must be unique."}) - - # it's fine if a group in another org has that name - self.assertPost(endpoint_url, self.admin, {"name": "Spammers"}, status=201) - - # try to create a group with invalid name - self.assertPost( - endpoint_url, self.admin, {"name": '"People"'}, errors={"name": 'Cannot contain the character: "'} - ) - - # try to create a group with name that's too long - self.assertPost( - endpoint_url, - self.admin, - {"name": "x" * 65}, - errors={"name": "Ensure this field has no more than 64 characters."}, - ) - - # update group by UUID - self.assertPost(endpoint_url + f"?uuid={reporters.uuid}", self.admin, {"name": "U-Reporters"}) - - reporters.refresh_from_db() - self.assertEqual(reporters.name, "U-Reporters") - - # can't update a system group - self.assertPost( - endpoint_url + f"?uuid={open_tickets.uuid}", - self.admin, - {"name": "Won't work"}, - errors={None: "Cannot modify system object."}, - status=403, - ) - self.assertTrue(self.org.groups.filter(name="Open Tickets").exists()) - - # can't update a group from other org - self.assertPost(endpoint_url + f"?uuid={spammers.uuid}", self.admin, {"name": "Won't work"}, status=404) - - # try an empty delete request - self.assertDelete( - endpoint_url, self.admin, errors={None: "URL must contain one of the following parameters: uuid"} - ) - - # delete a group by UUID - self.assertDelete(endpoint_url + f"?uuid={reporters.uuid}", self.admin, status=204) - - reporters.refresh_from_db() - self.assertFalse(reporters.is_active) - - # can't delete a system group - self.assertDelete( - endpoint_url + f"?uuid={open_tickets.uuid}", - self.admin, - errors={None: "Cannot delete system object."}, - status=403, - ) - self.assertTrue(self.org.groups.filter(name="Open Tickets").exists()) - - # can't delete a group with a trigger dependency - trigger = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - self.create_flow("Test"), - keywords=["block_group"], - match_type=Trigger.MATCH_FIRST_WORD, - ) - trigger.groups.add(customers) - - self.assertDelete( - endpoint_url + f"?uuid={customers.uuid}", - self.admin, - errors={None: "Group is being used by triggers which must be archived first."}, - status=400, - ) - - # or a campaign dependency - trigger.groups.clear() - campaign = Campaign.create(self.org, self.admin, "Reminders", customers) - - self.assertDelete( - endpoint_url + f"?uuid={customers.uuid}", - self.admin, - errors={None: "Group is being used by campaigns which must be archived first."}, - status=400, - ) - - # can't delete a group in another org - self.assertDelete(endpoint_url + f"?uuid={spammers.uuid}", self.admin, status=404) - - campaign.delete() - for group in ContactGroup.objects.filter(is_system=False): - group.release(self.admin) - - for i in range(10): - ContactGroup.create_manual(self.org2, self.admin2, "group%d" % i) - - self.assertPost(endpoint_url, self.admin, {"name": "Reporters"}, status=201) - - ContactGroup.objects.filter(is_system=False, is_active=True).delete() - - for i in range(10): - ContactGroup.create_manual(self.org, self.admin, "group%d" % i) - - self.assertPost( - endpoint_url, - self.admin, - {"name": "Reporters"}, - errors={None: "Cannot create object because workspace has reached limit of 10."}, - status=409, - ) - - def test_labels(self): - endpoint_url = reverse("api.v2.labels") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None, self.agent]) - self.assertPostNotPermitted(endpoint_url, 
[None, self.user, self.agent]) - self.assertDeleteNotPermitted(endpoint_url + "?uuid=123", [None, self.user, self.agent]) - - important = self.create_label("Important") - feedback = self.create_label("Feedback") - - # a deleted label - deleted = self.create_label("Deleted") - deleted.release(self.admin) - - # create label for other org - spam = self.create_label("Spam", org=self.org2) - - msg = self.create_incoming_msg(self.frank, "Hello") - important.toggle_label([msg], add=True) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - {"uuid": str(feedback.uuid), "name": "Feedback", "count": 0}, - {"uuid": str(important.uuid), "name": "Important", "count": 1}, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 2, - ) - - # filter by UUID - self.assertGet(endpoint_url + f"?uuid={feedback.uuid}", [self.editor], results=[feedback]) - - # filter by name - self.assertGet(endpoint_url + "?name=important", [self.editor], results=[important]) - - # try to filter by both - self.assertGet( - endpoint_url + f"?uuid={important.uuid}&name=important", - [self.editor], - errors={None: "You may only specify one of the uuid, name parameters"}, - ) - - # try to create empty label - self.assertPost(endpoint_url, self.editor, {}, errors={"name": "This field is required."}) - - # create new label - response = self.assertPost(endpoint_url, self.editor, {"name": "Interesting"}, status=201) - - interesting = Label.objects.get(name="Interesting") - self.assertEqual(response.json(), {"uuid": str(interesting.uuid), "name": "Interesting", "count": 0}) - - # try to create another label with same name - self.assertPost( - endpoint_url, self.admin, {"name": "interesting"}, errors={"name": "This field must be unique."} - ) - - # it's fine if a label in another org has that name - self.assertPost(endpoint_url, self.admin, {"name": "Spam"}, status=201) - - # try to create a label with invalid name - self.assertPost(endpoint_url, self.admin, {"name": '""'}, errors={"name": 'Cannot contain the character: "'}) - - # try to create a label with name that's too long - self.assertPost( - endpoint_url, - self.admin, - {"name": "x" * 65}, - errors={"name": "Ensure this field has no more than 64 characters."}, - ) - - # update label by UUID - response = self.assertPost(endpoint_url + f"?uuid={interesting.uuid}", self.admin, {"name": "More Interesting"}) - interesting.refresh_from_db() - self.assertEqual(interesting.name, "More Interesting") - - # can't update label from other org - self.assertPost(endpoint_url + f"?uuid={spam.uuid}", self.admin, {"name": "Won't work"}, status=404) - - # try an empty delete request - self.assertDelete( - endpoint_url, self.admin, errors={None: "URL must contain one of the following parameters: uuid"} - ) - - # delete a label by UUID - self.assertDelete(endpoint_url + f"?uuid={interesting.uuid}", self.admin) - interesting.refresh_from_db() - self.assertFalse(interesting.is_active) - - # try to delete a label in another org - self.assertDelete(endpoint_url + f"?uuid={spam.uuid}", self.admin, status=404) - - # try creating a new label after reaching the limit on labels - with override_settings(ORG_LIMIT_DEFAULTS={"labels": self.org.msgs_labels.filter(is_active=True).count()}): - self.assertPost( - endpoint_url, - self.admin, - {"name": "Interesting"}, - errors={None: "Cannot create object because workspace has reached limit of 3."}, - status=409, - ) - - @mock_uuids - def test_media(self): - endpoint_url = reverse("api.v2.media") + ".json" - - 
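-        # For reference, a minimal sketch of how a client might upload a file to
-        # this endpoint as multipart form data (illustrative only: the host,
-        # file and token below are placeholders):
-        #
-        #   import requests
-        #
-        #   with open("photo.jpg", "rb") as f:
-        #       requests.post(
-        #           "https://example.com/api/v2/media.json",
-        #           files={"file": f},
-        #           headers={"Authorization": "Token <api-token>"},
-        #       )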
-        self.assertGetNotAllowed(endpoint_url)
-        self.assertPostNotPermitted(endpoint_url, [None, self.user])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        def upload(user, filename: str):
-            self.login(user)
-            with open(filename, "rb") as data:
-                return self.client.post(endpoint_url, {"file": data}, HTTP_X_FORWARDED_HTTPS="https")
-
-        self.login(self.admin)
-        response = self.client.post(endpoint_url, {}, HTTP_X_FORWARDED_HTTPS="https")
-        self.assertResponseError(response, "file", "No file was submitted.")
-
-        response = upload(self.agent, f"{settings.MEDIA_ROOT}/test_imports/simple.xlsx")
-        self.assertResponseError(response, "file", "Unsupported file type.")
-
-        with patch("temba.msgs.models.Media.MAX_UPLOAD_SIZE", 1024):
-            response = upload(self.editor, f"{settings.MEDIA_ROOT}/test_media/snow.mp4")
-            self.assertResponseError(response, "file", "Limit for file uploads is 0.0009765625 MB.")
-
-        response = upload(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg")
-        self.assertEqual(201, response.status_code)
-        self.assertEqual(
-            {
-                "uuid": "b97f69f7-5edf-45c7-9fda-d37066eae91d",
-                "content_type": "image/jpeg",
-                "url": f"{settings.STORAGE_URL}/orgs/{self.org.id}/media/b97f/b97f69f7-5edf-45c7-9fda-d37066eae91d/steve%20marten.jpg",
-                "filename": "steve marten.jpg",
-                "size": 7461,
-            },
-            response.json(),
-        )
-
-        media = Media.objects.get()
-        self.assertEqual(Media.STATUS_READY, media.status)
-
-    @mock_mailroom
-    def test_messages(self, mr_mocks):
-        endpoint_url = reverse("api.v2.messages") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotPermitted(endpoint_url, [None, self.user])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some messages
-        flow = self.create_flow("Test")
-        joe_msg1 = self.create_incoming_msg(self.joe, "Howdy", flow=flow)
-        frank_msg1 = self.create_incoming_msg(self.frank, "Bonjour", channel=self.twitter)
-        joe_msg2 = self.create_outgoing_msg(self.joe, "How are you?", status="Q")
-        frank_msg2 = self.create_outgoing_msg(self.frank, "Ça va?", status="D")
-        joe_msg3 = self.create_incoming_msg(
-            self.joe, "Good", flow=flow, attachments=["image/jpeg:https://example.com/test.jpg"]
-        )
-        frank_msg3 = self.create_incoming_msg(self.frank, "Bien", channel=self.twitter, visibility="A")
-        frank_msg4 = self.create_outgoing_msg(self.frank, "Ça va?", status="F")
-
-        # add a failed message with no URN or channel
-        joe_msg4 = self.create_outgoing_msg(self.joe, "Sorry", failed_reason=Msg.FAILED_NO_DESTINATION)
-
-        # add an unhandled message
-        self.create_incoming_msg(self.joe, "Just in!", status="P")
-
-        # add a deleted message
-        deleted_msg = self.create_incoming_msg(self.frank, "!@$!%", visibility="D")
-
-        # add a message in another org
-        self.create_incoming_msg(self.hans, "Guten tag!", channel=None)
-
-        # label some of the messages; this will also change their modified on for our `incoming` view
-        label = self.create_label("Spam")
-
-        # we do this in two calls so that we can predict ordering later
-        label.toggle_label([frank_msg3], add=True)
-        label.toggle_label([frank_msg1], add=True)
-        label.toggle_label([joe_msg3], add=True)
-
-        frank_msg1.refresh_from_db(fields=("modified_on",))
-        joe_msg3.refresh_from_db(fields=("modified_on",))
-
-        # make this message sent later than the other sent message created before it, to check ordering of sent messages
-        frank_msg2.sent_on = timezone.now()
-        frank_msg2.save(update_fields=("sent_on",))
-
-        # default response is all messages sorted by created_on
-        self.assertGet(
-            endpoint_url,
-            [self.user, self.editor, self.admin],
-            results=[joe_msg4, frank_msg4, frank_msg3, joe_msg3, frank_msg2, joe_msg2, frank_msg1, joe_msg1],
-            num_queries=NUM_BASE_SESSION_QUERIES + 6,
-        )
-
-        # filter by inbox
-        self.assertGet(
-            endpoint_url + "?folder=INBOX",
-            [self.admin],
-            results=[
-                {
-                    "id": frank_msg1.id,
-                    "type": "text",
-                    "channel": {"uuid": str(self.twitter.uuid), "name": "Twitter Channel"},
-                    "contact": {"uuid": str(self.frank.uuid), "name": "Frank"},
-                    "urn": "twitter:franky",
-                    "text": "Bonjour",
-                    "attachments": [],
-                    "archived": False,
-                    "broadcast": None,
-                    "created_on": format_datetime(frank_msg1.created_on),
-                    "direction": "in",
-                    "flow": None,
-                    "labels": [{"uuid": str(label.uuid), "name": "Spam"}],
-                    "media": None,
-                    "modified_on": format_datetime(frank_msg1.modified_on),
-                    "sent_on": None,
-                    "status": "handled",
-                    "visibility": "visible",
-                }
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 5,
-        )
-
-        # filter by incoming, should get deleted messages too
-        self.assertGet(
-            endpoint_url + "?folder=incoming",
-            [self.admin],
-            results=[joe_msg3, frank_msg1, frank_msg3, deleted_msg, joe_msg1],
-        )
-
-        # filter by other folders...
-        self.assertGet(endpoint_url + "?folder=flows", [self.admin], results=[joe_msg3, joe_msg1])
-        self.assertGet(endpoint_url + "?folder=archived", [self.admin], results=[frank_msg3])
-        self.assertGet(endpoint_url + "?folder=outbox", [self.admin], results=[joe_msg2])
-        self.assertGet(endpoint_url + "?folder=sent", [self.admin], results=[frank_msg2])
-        self.assertGet(endpoint_url + "?folder=failed", [self.admin], results=[joe_msg4, frank_msg4])
-
-        # filter by invalid folder
-        self.assertGet(endpoint_url + "?folder=invalid", [self.admin], results=[])
-
-        # filter by id
-        self.assertGet(endpoint_url + f"?id={joe_msg3.id}", [self.admin], results=[joe_msg3])
-
-        # filter by contact
-        self.assertGet(
-            endpoint_url + f"?contact={self.joe.uuid}", [self.admin], results=[joe_msg4, joe_msg3, joe_msg2, joe_msg1]
-        )
-
-        # filter by invalid contact
-        self.assertGet(endpoint_url + "?contact=invalid", [self.admin], results=[])
-
-        # filter by label UUID / name
-        self.assertGet(endpoint_url + f"?label={label.uuid}", [self.admin], results=[frank_msg3, joe_msg3, frank_msg1])
-        self.assertGet(endpoint_url + "?label=Spam", [self.admin], results=[frank_msg3, joe_msg3, frank_msg1])
-
-        # filter by invalid label
-        self.assertGet(endpoint_url + "?label=invalid", [self.admin], results=[])
-
-        # filter by before (inclusive)
-        self.assertGet(
-            endpoint_url + f"?folder=incoming&before={format_datetime(frank_msg1.modified_on)}",
-            [self.editor],
-            results=[frank_msg1, frank_msg3, deleted_msg, joe_msg1],
-        )
-
-        # filter by after (inclusive)
-        self.assertGet(
-            endpoint_url + f"?folder=incoming&after={format_datetime(frank_msg1.modified_on)}",
-            [self.editor],
-            results=[joe_msg3, frank_msg1],
-        )
-
-        # filter by broadcast
-        broadcast = self.create_broadcast(
-            self.user, {"eng": {"text": "A beautiful broadcast"}}, contacts=[self.joe, self.frank]
-        )
-        self.assertGet(
-            endpoint_url + f"?broadcast={broadcast.id}",
-            [self.editor],
-            results=broadcast.msgs.order_by("-id"),
-        )
-
-        # can't filter with an invalid id
-        self.assertGet(endpoint_url + "?id=xyz", [self.editor], errors={None: "Value for id must be an integer"})
-
-        # can't filter by more than one of contact, folder, label or broadcast together
-        for query in (
-            f"?contact={self.joe.uuid}&label=Spam",
-            "?label=Spam&folder=inbox",
-            "?broadcast=12345&folder=inbox",
-            "?broadcast=12345&label=Spam",
-        ):
-            self.assertGet(
-                endpoint_url + query,
-                [self.editor],
-                errors={None: "You may only specify one of the contact, folder, label, broadcast parameters"},
-            )
-
-        with self.anonymous(self.org):
-            # for anon orgs, don't return URN values
-            response = self.assertGet(endpoint_url + f"?id={joe_msg3.id}", [self.admin], results=[joe_msg3])
-            self.assertIsNone(response.json()["results"][0]["urn"])
-
-        # try to create a message with an empty request
-        self.assertPost(endpoint_url, self.admin, {}, errors={"contact": "This field is required."})
-
-        # try to create an empty message
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid},
-            errors={"non_field_errors": "Must provide either text or attachments."},
-        )
-
-        # create a new message with just text - which shouldn't need to read anything about the msg from the db
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid, "text": "Interesting"},
-            status=201,
-        )
-
-        msg = Msg.objects.order_by("id").last()
-        self.assertEqual(
-            {
-                "id": msg.id,
-                "type": "text",
-                "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"},
-                "contact": {"uuid": str(self.joe.uuid), "name": "Joe Blow"},
-                "urn": "tel:+250788123123",
-                "text": "Interesting",
-                "attachments": [],
-                "archived": False,
-                "broadcast": None,
-                "created_on": format_datetime(msg.created_on),
-                "direction": "out",
-                "flow": None,
-                "labels": [],
-                "media": None,
-                "modified_on": format_datetime(msg.modified_on),
-                "sent_on": None,
-                "status": "queued",
-                "visibility": "visible",
-            },
-            response.json(),
-        )
-
-        self.assertEqual(
-            call(self.org, self.admin, self.joe, "Interesting", [], None),
-            mr_mocks.calls["msg_send"][-1],
-        )
-
-        # try to create a message with an invalid attachment media UUID
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid, "text": "Hi", "attachments": ["xxxx"]},
-            errors={"attachments": "No such object: xxxx"},
-        )
-
-        # try to create a message with a non-existent attachment media UUID
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid, "text": "Hi", "attachments": ["67ffe746-8771-40fb-89c1-5388e7ddd439"]},
-            errors={"attachments": "No such object: 67ffe746-8771-40fb-89c1-5388e7ddd439"},
-        )
-
-        upload = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg")
-
-        # create a new message with an attachment as the media UUID...
-        self.assertPost(
-            endpoint_url, self.admin, {"contact": self.joe.uuid, "attachments": [str(upload.uuid)]}, status=201
-        )
-        self.assertEqual(  # check that was sent via mailroom
-            call(self.org, self.admin, self.joe, "", [f"image/jpeg:{upload.url}"], None),
-            mr_mocks.calls["msg_send"][-1],
-        )
-
-        # create a new message with an attachment as content-type:url...
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid, "attachments": [f"image/jpeg:https://example.com/{upload.uuid}.jpg"]},
-            status=201,
-        )
-        self.assertEqual(
-            call(self.org, self.admin, self.joe, "", [f"image/jpeg:{upload.url}"], None),
-            mr_mocks.calls["msg_send"][-1],
-        )
-
-        # try to create a message with too many attachments
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": self.joe.uuid, "attachments": [str(upload.uuid)] * 11},
-            errors={"attachments": "Ensure this field has no more than 10 elements."},
-        )
-
-        # try to create an unsendable message
-        billy_no_phone = self.create_contact("Billy", urns=[])
-        response = self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"contact": billy_no_phone.uuid, "text": "well?"},
-            status=201,
-        )
-
-        msg_json = response.json()
-        self.assertIsNone(msg_json["channel"])
-        self.assertIsNone(msg_json["urn"])
-        self.assertEqual("failed", msg_json["status"])
-
-    def test_message_actions(self):
-        endpoint_url = reverse("api.v2.message_actions") + ".json"
-
-        self.assertGetNotAllowed(endpoint_url)
-        self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some messages to act on
-        msg1 = self.create_incoming_msg(self.joe, "Msg #1")
-        msg2 = self.create_incoming_msg(self.joe, "Msg #2")
-        msg3 = self.create_incoming_msg(self.joe, "Msg #3")
-        label = self.create_label("Test")
-
-        # add label by name to messages 1 and 2
-        self.assertPost(
-            endpoint_url, self.editor, {"messages": [msg1.id, msg2.id], "action": "label", "label": "Test"}, status=204
-        )
-        self.assertEqual(set(label.get_messages()), {msg1, msg2})
-
-        # add label by its UUID to message 3
-        self.assertPost(
-            endpoint_url, self.admin, {"messages": [msg3.id], "action": "label", "label": str(label.uuid)}, status=204
-        )
-        self.assertEqual(set(label.get_messages()), {msg1, msg2, msg3})
-
-        # try to label with an invalid UUID
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id], "action": "label", "label": "nope"},
-            errors={"label": "No such object: nope"},
-        )
-
-        # remove label from message 2 by name (which is case-insensitive)
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg2.id], "action": "unlabel", "label": "test"},
-            status=204,
-        )
-        self.assertEqual(set(label.get_messages()), {msg1, msg3})
-
-        # and remove from messages 1 and 3 by UUID
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id, msg3.id], "action": "unlabel", "label": str(label.uuid)},
-            status=204,
-        )
-        self.assertEqual(set(label.get_messages()), set())
-
-        # add new label via label_name
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg2.id, msg3.id], "action": "label", "label_name": "New"},
-            status=204,
-        )
-        new_label = Label.objects.get(org=self.org, name="New", is_active=True)
-        self.assertEqual(set(new_label.get_messages()), {msg2, msg3})
-
-        # no difference if label already exists as it does now
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id], "action": "label", "label_name": "New"},
-            status=204,
-        )
-        self.assertEqual(set(new_label.get_messages()), {msg1, msg2, msg3})
-
-        # can also remove by label_name
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg3.id], "action": "unlabel", "label_name": "New"},
-            status=204,
-        )
-        self.assertEqual(set(new_label.get_messages()), {msg1, msg2})
-
-        # and no error if label doesn't exist
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg3.id], "action": "unlabel", "label_name": "XYZ"},
-            status=204,
-        )
-        # and the label is not lazy-created in this case
-        self.assertIsNone(Label.objects.filter(name="XYZ").first())
-
-        # try to use an invalid label name
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id, msg2.id], "action": "label", "label_name": '"Hi"'},
-            errors={"label_name": 'Cannot contain the character: "'},
-        )
-
-        # try to label without specifying a label
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id, msg2.id], "action": "label"},
-            errors={"non_field_errors": 'For action "label" you should also specify a label'},
-        )
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id, msg2.id], "action": "label", "label": ""},
-            errors={"label": "This field may not be null."},
-        )
-
-        # try to provide both label and label_name
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id], "action": "label", "label": "Test", "label_name": "Test"},
-            errors={"non_field_errors": "Can't specify both label and label_name."},
-        )
-
-        # archive all messages
-        self.assertPost(
-            endpoint_url, self.admin, {"messages": [msg1.id, msg2.id, msg3.id], "action": "archive"}, status=204
-        )
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg1, msg2, msg3})
-
-        # restore message 1
-        self.assertPost(endpoint_url, self.admin, {"messages": [msg1.id], "action": "restore"}, status=204)
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1})
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg2, msg3})
-
-        # delete message 2
-        self.assertPost(endpoint_url, self.admin, {"messages": [msg2.id], "action": "delete"}, status=204)
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1})
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg3})
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_DELETED_BY_USER)), {msg2})
-
-        # try to act on a valid message and a deleted message
-        response = self.assertPost(
-            endpoint_url, self.admin, {"messages": [msg2.id, msg3.id], "action": "restore"}, status=200
-        )
-
-        # should get a partial success
-        self.assertEqual(response.json(), {"failures": [msg2.id]})
-        self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1, msg3})
-
-        # try to act on an outgoing message
-        msg4 = self.create_outgoing_msg(self.joe, "Hi Joe")
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id, msg4.id], "action": "archive"},
-            errors={"messages": f"Not an incoming message: {msg4.id}"},
-        )
-
-        # try to provide a label for a non-labelling action
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id], "action": "archive", "label": "Test"},
-            errors={"non_field_errors": 'For action "archive" you should not specify a label'},
-        )
-
-        # try to invoke an invalid action
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"messages": [msg1.id], "action": "like"},
-            errors={"action": '"like" is not a valid choice.'},
-        )
-
-    def test_runs(self):
-        endpoint_url = reverse("api.v2.runs") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        flow1 = self.get_flow("color_v13")
-        flow2 = flow1.clone(self.user)
-
-        flow1_nodes = flow1.get_definition()["nodes"]
-        color_prompt = flow1_nodes[0]
-        color_split = flow1_nodes[4]
-        blue_reply = flow1_nodes[2]
-
-        start1 = FlowStart.create(flow1, self.admin, contacts=[self.joe])
-        joe_msg = self.create_incoming_msg(self.joe, "it is blue")
-        frank_msg = self.create_incoming_msg(self.frank, "Indigo")
-
-        joe_run1 = (
-            MockSessionWriter(self.joe, flow1, start=start1)
-            .visit(color_prompt)
-            .visit(color_split)
-            .wait()
-            .resume(msg=joe_msg)
-            .set_result("Color", "blue", "Blue", "it is blue")
-            .visit(blue_reply)
-            .complete()
-            .save()
-        ).session.runs.get()
-
-        frank_run1 = (
-            MockSessionWriter(self.frank, flow1)
-            .visit(color_prompt)
-            .visit(color_split)
-            .wait()
-            .resume(msg=frank_msg)
-            .set_result("Color", "Indigo", "Other", "Indigo")
-            .wait()
-            .save()
-        ).session.runs.get()
-
-        joe_run2 = (
-            MockSessionWriter(self.joe, flow1).visit(color_prompt).visit(color_split).wait().save()
-        ).session.runs.get()
-        frank_run2 = (
-            MockSessionWriter(self.frank, flow1).visit(color_prompt).visit(color_split).wait().save()
-        ).session.runs.get()
-
-        joe_run3 = MockSessionWriter(self.joe, flow2).wait().save().session.runs.get()
-
-        # add a run for another org
-        flow3 = self.create_flow("Test", org=self.org2)
-        MockSessionWriter(self.hans, flow3).wait().save()
-
-        # refresh runs which will have been modified by being interrupted
-        joe_run1.refresh_from_db()
-        joe_run2.refresh_from_db()
-        frank_run1.refresh_from_db()
-        frank_run2.refresh_from_db()
-
-        # no filtering
-        response = self.assertGet(
-            endpoint_url,
-            [self.user, self.editor],
-            results=[joe_run3, joe_run2, frank_run2, frank_run1, joe_run1],
-            num_queries=NUM_BASE_SESSION_QUERIES + 6,
-        )
-        resp_json = response.json()
-        self.assertEqual(
-            {
-                "id": frank_run2.id,
-                "uuid": str(frank_run2.uuid),
-                "flow": {"uuid": str(flow1.uuid), "name": "Colors"},
-                "contact": {
-                    "uuid": str(self.frank.uuid),
-                    "name": self.frank.name,
-                    "urn": "twitter:franky",
-                    "urn_display": "franky",
-                },
-                "start": None,
-                "responded": False,
-                "path": [
-                    {
-                        "node": color_prompt["uuid"],
-                        "time": format_datetime(iso8601.parse_date(frank_run2.path[0]["arrived_on"])),
-                    },
-                    {
-                        "node": color_split["uuid"],
-                        "time": format_datetime(iso8601.parse_date(frank_run2.path[1]["arrived_on"])),
-                    },
-                ],
-                "values": {},
-                "created_on": format_datetime(frank_run2.created_on),
-                "modified_on": format_datetime(frank_run2.modified_on),
-                "exited_on": None,
-                "exit_type": None,
-            },
-            resp_json["results"][2],
-        )
-        self.assertEqual(
-            {
-                "id": joe_run1.id,
-                "uuid": str(joe_run1.uuid),
-                "flow": {"uuid": str(flow1.uuid), "name": "Colors"},
-                "contact": {
-                    "uuid": str(self.joe.uuid),
-                    "name": self.joe.name,
-                    "urn": "tel:+250788123123",
-                    "urn_display": "0788 123 123",
-                },
-                "start": {"uuid": str(joe_run1.start.uuid)},
-                "responded": True,
-                "path": [
-                    {
-                        "node": color_prompt["uuid"],
-                        "time": format_datetime(iso8601.parse_date(joe_run1.path[0]["arrived_on"])),
-                    },
-                    {
-                        "node": color_split["uuid"],
-                        "time": format_datetime(iso8601.parse_date(joe_run1.path[1]["arrived_on"])),
-                    },
-                    {
-                        "node": blue_reply["uuid"],
-                        "time": format_datetime(iso8601.parse_date(joe_run1.path[2]["arrived_on"])),
-                    },
-                ],
-                "values": {
-                    "color": {
-                        "value": "blue",
-                        "category": "Blue",
-                        "node": color_split["uuid"],
-                        "time": format_datetime(iso8601.parse_date(joe_run1.results["color"]["created_on"])),
-                        "name": "Color",
-                        "input": "it is blue",
-                    }
-                },
-                "created_on": format_datetime(joe_run1.created_on),
-                "modified_on": format_datetime(joe_run1.modified_on),
-                "exited_on": format_datetime(joe_run1.exited_on),
-                "exit_type": "completed",
-            },
resp_json["results"][4], - ) - - # can request without path data - response = self.assertGet( - endpoint_url + "?paths=false", [self.editor], results=[joe_run3, joe_run2, frank_run2, frank_run1, joe_run1] - ) - resp_json = response.json() - self.assertEqual( - { - "id": frank_run2.id, - "uuid": str(frank_run2.uuid), - "flow": {"uuid": str(flow1.uuid), "name": "Colors"}, - "contact": { - "uuid": str(self.frank.uuid), - "name": self.frank.name, - "urn": "twitter:franky", - "urn_display": "franky", - }, - "start": None, - "responded": False, - "path": None, - "values": {}, - "created_on": format_datetime(frank_run2.created_on), - "modified_on": format_datetime(frank_run2.modified_on), - "exited_on": None, - "exit_type": None, - }, - resp_json["results"][2], - ) - - # reversed - self.assertGet( - endpoint_url + "?reverse=true", - [self.editor], - results=[joe_run1, frank_run1, frank_run2, joe_run2, joe_run3], - ) - - # filter by id - self.assertGet(endpoint_url + f"?id={frank_run2.id}", [self.admin], results=[frank_run2]) - - # anon orgs should not have a URN field - with self.anonymous(self.org): - response = self.assertGet(endpoint_url + f"?id={frank_run2.id}", [self.admin], results=[frank_run2]) - self.assertEqual( - { - "id": frank_run2.pk, - "uuid": str(frank_run2.uuid), - "flow": {"uuid": flow1.uuid, "name": "Colors"}, - "contact": { - "uuid": self.frank.uuid, - "name": self.frank.name, - "urn": "twitter:********", - "urn_display": None, - "anon_display": f"{self.frank.id:010}", - }, - "start": None, - "responded": False, - "path": [ - { - "node": color_prompt["uuid"], - "time": format_datetime(iso8601.parse_date(frank_run2.path[0]["arrived_on"])), - }, - { - "node": color_split["uuid"], - "time": format_datetime(iso8601.parse_date(frank_run2.path[1]["arrived_on"])), - }, - ], - "values": {}, - "created_on": format_datetime(frank_run2.created_on), - "modified_on": format_datetime(frank_run2.modified_on), - "exited_on": None, - "exit_type": None, - }, - response.json()["results"][0], - ) - - # filter by uuid - self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}", [self.admin], results=[frank_run2]) - - # filter by id and uuid - self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}&id={joe_run1.id}", [self.admin], results=[]) - self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}&id={frank_run2.id}", [self.admin], results=[frank_run2]) - - # filter by flow - self.assertGet( - endpoint_url + f"?flow={flow1.uuid}", [self.admin], results=[joe_run2, frank_run2, frank_run1, joe_run1] - ) - - # doesn't work if flow is inactive - flow1.is_active = False - flow1.save() - - self.assertGet(endpoint_url + f"?flow={flow1.uuid}", [self.admin], results=[]) - - # restore to active - flow1.is_active = True - flow1.save() - - # filter by invalid flow - self.assertGet(endpoint_url + "?flow=invalid", [self.admin], results=[]) - - # filter by flow + responded - self.assertGet( - endpoint_url + f"?flow={flow1.uuid}&responded=TrUe", [self.admin], results=[frank_run1, joe_run1] - ) - - # filter by contact - self.assertGet(endpoint_url + f"?contact={self.joe.uuid}", [self.admin], results=[joe_run3, joe_run2, joe_run1]) - - # filter by invalid contact - self.assertGet(endpoint_url + "?contact=invalid", [self.admin], results=[]) - - # filter by contact + responded - self.assertGet(endpoint_url + f"?contact={self.joe.uuid}&responded=yes", [self.admin], results=[joe_run1]) - - # filter by before / after - self.assertGet( - endpoint_url + f"?before={format_datetime(frank_run1.modified_on)}", - [self.admin], - 
-            results=[frank_run1, joe_run1],
-        )
-        self.assertGet(
-            endpoint_url + f"?after={format_datetime(frank_run1.modified_on)}",
-            [self.admin],
-            results=[joe_run3, joe_run2, frank_run2, frank_run1],
-        )
-
-        # filter by invalid before / after
-        self.assertGet(endpoint_url + "?before=longago", [self.admin], results=[])
-        self.assertGet(endpoint_url + "?after=thefuture", [self.admin], results=[])
-
-        # can't filter by both contact and flow together
-        self.assertGet(
-            endpoint_url + f"?contact={self.joe.uuid}&flow={flow1.uuid}",
-            [self.admin],
-            errors={None: "You may only specify one of the contact, flow parameters"},
-        )
-
-    def test_optins(self):
-        endpoint_url = reverse("api.v2.optins") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None, self.agent])
-        self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some optins
-        polls = OptIn.create(self.org, self.admin, "Polls")
-        offers = OptIn.create(self.org, self.admin, "Offers")
-        OptIn.create(self.org2, self.admin, "Promos")
-
-        # no filtering
-        self.assertGet(
-            endpoint_url,
-            [self.user, self.editor],
-            results=[
-                {
-                    "uuid": str(offers.uuid),
-                    "name": "Offers",
-                    "created_on": format_datetime(offers.created_on),
-                },
-                {
-                    "uuid": str(polls.uuid),
-                    "name": "Polls",
-                    "created_on": format_datetime(polls.created_on),
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 1,
-        )
-
-        # try to create an empty optin
-        self.assertPost(endpoint_url, self.admin, {}, errors={"name": "This field is required."})
-
-        # create new optin
-        response = self.assertPost(endpoint_url, self.admin, {"name": "Alerts"}, status=201)
-
-        alerts = OptIn.objects.get(name="Alerts")
-        self.assertEqual(
-            response.json(),
-            {
-                "uuid": str(alerts.uuid),
-                "name": "Alerts",
-                "created_on": matchers.ISODate(),
-            },
-        )
-
-        # try to create another optin with same name
-        self.assertPost(endpoint_url, self.admin, {"name": "Alerts"}, errors={"name": "This field must be unique."})
-
-        # it's fine if an optin in another org has that name
-        self.assertPost(endpoint_url, self.editor, {"name": "Promos"}, status=201)
-
-        # try to create an optin with an invalid name
-        self.assertPost(endpoint_url, self.admin, {"name": '"Hi"'}, errors={"name": 'Cannot contain the character: "'})
-
-        # try to create an optin with a name that's too long
-        self.assertPost(
-            endpoint_url,
-            self.admin,
-            {"name": "x" * 65},
-            errors={"name": "Ensure this field has no more than 64 characters."},
-        )
-
-    def test_resthooks(self):
-        hooks_url = reverse("api.v2.resthooks") + ".json"
-        subs_url = reverse("api.v2.resthook_subscribers") + ".json"
-        events_url = reverse("api.v2.resthook_events") + ".json"
-
-        self.assertGetNotPermitted(hooks_url, [None, self.agent, self.user])
-        self.assertPostNotAllowed(hooks_url)
-        self.assertDeleteNotAllowed(hooks_url)
-
-        self.assertGetNotPermitted(subs_url, [None, self.agent, self.user])
-        self.assertPostNotPermitted(subs_url, [None, self.agent, self.user])
-        self.assertDeleteNotPermitted(subs_url, [None, self.agent, self.user])
-
-        self.assertGetNotPermitted(events_url, [None, self.agent, self.user])
-        self.assertPostNotAllowed(events_url)
-        self.assertDeleteNotAllowed(events_url)
-
-        # create some resthooks
-        resthook1 = Resthook.get_or_create(self.org, "new-mother", self.admin)
-        resthook2 = Resthook.get_or_create(self.org, "new-father", self.admin)
-        resthook3 = Resthook.get_or_create(self.org, "not-active", self.admin)
-        resthook3.is_active = False
-        resthook3.save()
-
-        # create a resthook for another org
-        other_org_resthook = Resthook.get_or_create(self.org2, "spam", self.admin2)
-
-        # fetch hooks with no filtering
-        self.assertGet(
-            hooks_url,
-            [self.editor, self.admin],
-            results=[
-                {
-                    "resthook": "new-father",
-                    "created_on": format_datetime(resthook2.created_on),
-                    "modified_on": format_datetime(resthook2.modified_on),
-                },
-                {
-                    "resthook": "new-mother",
-                    "created_on": format_datetime(resthook1.created_on),
-                    "modified_on": format_datetime(resthook1.modified_on),
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 1,
-        )
-
-        # try to create an empty subscription
-        self.assertPost(
-            subs_url,
-            self.admin,
-            {},
-            errors={"resthook": "This field is required.", "target_url": "This field is required."},
-        )
-
-        # try to create one for a resthook in another org
-        self.assertPost(
-            subs_url,
-            self.admin,
-            {"resthook": "spam", "target_url": "https://foo.bar/"},
-            errors={"resthook": "No resthook with slug: spam"},
-        )
-
-        # create subscribers on each resthook
-        self.assertPost(
-            subs_url, self.editor, {"resthook": "new-mother", "target_url": "https://foo.bar/mothers"}, status=201
-        )
-        self.assertPost(
-            subs_url, self.admin, {"resthook": "new-father", "target_url": "https://foo.bar/fathers"}, status=201
-        )
-
-        hook1_subscriber = resthook1.subscribers.get()
-        hook2_subscriber = resthook2.subscribers.get()
-
-        # create a subscriber on our other resthook
-        other_org_subscriber = other_org_resthook.add_subscriber("https://bar.foo", self.admin2)
-
-        # fetch subscribers with no filtering
-        self.assertGet(
-            subs_url,
-            [self.editor, self.admin],
-            results=[
-                {
-                    "id": hook2_subscriber.id,
-                    "resthook": "new-father",
-                    "target_url": "https://foo.bar/fathers",
-                    "created_on": format_datetime(hook2_subscriber.created_on),
-                },
-                {
-                    "id": hook1_subscriber.id,
-                    "resthook": "new-mother",
-                    "target_url": "https://foo.bar/mothers",
-                    "created_on": format_datetime(hook1_subscriber.created_on),
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 1,
-        )
-
-        # filter by id
-        self.assertGet(subs_url + f"?id={hook1_subscriber.id}", [self.editor], results=[hook1_subscriber])
-
-        # filter by resthook
-        self.assertGet(subs_url + "?resthook=new-father", [self.editor], results=[hook2_subscriber])
-
-        # remove a subscriber
-        self.assertDelete(subs_url + f"?id={hook2_subscriber.id}", self.admin)
-
-        # subscriber should no longer be active
-        hook2_subscriber.refresh_from_db()
-        self.assertFalse(hook2_subscriber.is_active)
-
-        # try to delete without providing an id
-        self.assertDelete(
-            subs_url + "?", self.editor, errors={None: "URL must contain one of the following parameters: id"}
-        )
-
-        # try to delete a subscriber from another org
-        self.assertDelete(subs_url + f"?id={other_org_subscriber.id}", self.editor, status=404)
-
-        # create some events on our resthooks
-        event1 = WebHookEvent.objects.create(
-            org=self.org,
-            resthook=resthook1,
-            data={"event": "new mother", "values": {"name": "Greg"}, "steps": {"uuid": "abcde"}},
-        )
-        event2 = WebHookEvent.objects.create(
-            org=self.org,
-            resthook=resthook2,
-            data={"event": "new father", "values": {"name": "Yo"}, "steps": {"uuid": "12345"}},
-        )
-
-        # fetch events with no filtering
-        self.assertGet(
-            events_url,
-            [self.editor, self.admin],
-            results=[
-                {
-                    "resthook": "new-father",
-                    "created_on": format_datetime(event2.created_on),
-                    "data": {"event": "new father", "values": {"name": "Yo"}, "steps": {"uuid": "12345"}},
-                },
-                {
-                    "resthook": "new-mother",
-                    "created_on": format_datetime(event1.created_on),
-                    "data": {"event": "new mother", "values": {"name": "Greg"}, "steps": {"uuid": "abcde"}},
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 1,
-        )
-
-    @mock_mailroom
-    def test_tickets(self, mr_mocks):
-        endpoint_url = reverse("api.v2.tickets") + ".json"
-
-        self.assertGetNotPermitted(endpoint_url, [None])
-        self.assertPostNotAllowed(endpoint_url)
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some tickets
-        ann = self.create_contact("Ann", urns=["twitter:annie"])
-        bob = self.create_contact("Bob", urns=["twitter:bobby"])
-        flow = self.create_flow("Support")
-
-        ticket1 = self.create_ticket(
-            ann, opened_by=self.admin, closed_on=datetime(2021, 1, 1, 12, 30, 45, 123456, tzone.utc)
-        )
-        ticket2 = self.create_ticket(bob, opened_in=flow)
-        ticket3 = self.create_ticket(bob, assignee=self.agent)
-
-        # on another org
-        self.create_ticket(self.create_contact("Jim", urns=["twitter:jimmy"], org=self.org2))
-
-        # no filtering
-        self.assertGet(
-            endpoint_url,
-            [self.user, self.editor, self.admin, self.agent],
-            results=[
-                {
-                    "uuid": str(ticket3.uuid),
-                    "assignee": {"email": "agent@nyaruka.com", "name": "Agnes"},
-                    "contact": {"uuid": str(bob.uuid), "name": "Bob"},
-                    "status": "open",
-                    "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"},
-                    "body": None,
-                    "opened_on": format_datetime(ticket3.opened_on),
-                    "opened_by": None,
-                    "opened_in": None,
-                    "modified_on": format_datetime(ticket3.modified_on),
-                    "closed_on": None,
-                },
-                {
-                    "uuid": str(ticket2.uuid),
-                    "assignee": None,
-                    "contact": {"uuid": str(bob.uuid), "name": "Bob"},
-                    "status": "open",
-                    "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"},
-                    "body": None,
-                    "opened_on": format_datetime(ticket2.opened_on),
-                    "opened_by": None,
-                    "opened_in": {"uuid": str(flow.uuid), "name": "Support"},
-                    "modified_on": format_datetime(ticket2.modified_on),
-                    "closed_on": None,
-                },
-                {
-                    "uuid": str(ticket1.uuid),
-                    "assignee": None,
-                    "contact": {"uuid": str(ann.uuid), "name": "Ann"},
-                    "status": "closed",
-                    "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"},
-                    "body": None,
-                    "opened_on": format_datetime(ticket1.opened_on),
-                    "opened_by": {"email": "admin@nyaruka.com", "name": "Andy"},
-                    "opened_in": None,
-                    "modified_on": format_datetime(ticket1.modified_on),
-                    "closed_on": "2021-01-01T12:30:45.123456Z",
-                },
-            ],
-            num_queries=NUM_BASE_SESSION_QUERIES + 6,
-        )
-
-        # filter by contact uuid (not there)
-        self.assertGet(endpoint_url + "?contact=09d23a05-47fe-11e4-bfe9-b8f6b119e9ab", [self.admin], results=[])
-
-        # filter by contact uuid present
-        self.assertGet(endpoint_url + f"?contact={bob.uuid}", [self.admin], results=[ticket3, ticket2])
-
-        # filter further by ticket uuid
-        self.assertGet(endpoint_url + f"?uuid={ticket3.uuid}", [self.admin], results=[ticket3])
-
-    @mock_mailroom
-    def test_ticket_actions(self, mr_mocks):
-        endpoint_url = reverse("api.v2.ticket_actions") + ".json"
-
-        self.assertGetNotAllowed(endpoint_url)
-        self.assertPostNotPermitted(endpoint_url, [None, self.user])
-        self.assertDeleteNotAllowed(endpoint_url)
-
-        # create some tickets
-        sales = Topic.create(self.org, self.admin, "Sales")
-        ticket1 = self.create_ticket(self.joe, closed_on=datetime(2021, 1, 1, 12, 30, 45, 123456, tzone.utc))
-        ticket2 = self.create_ticket(self.joe)
-        self.create_ticket(self.frank)
-
-        # on another org
-        ticket4 = self.create_ticket(self.create_contact("Jim", urns=["twitter:jimmy"], org=self.org2))
-
-        # try actioning more tickets than this endpoint is allowed to operate on at one time
-
self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(x) for x in range(101)], "action": "close"}, - errors={"tickets": "Ensure this field has no more than 100 elements."}, - ) - - # try actioning a ticket which is not in this org - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket4.uuid)], "action": "close"}, - errors={"tickets": f"No such object: {ticket4.uuid}"}, - ) - - # try to close tickets without specifying any tickets - self.assertPost( - endpoint_url, - self.agent, - {"action": "close"}, - errors={"tickets": "This field is required."}, - ) - - # try to assign ticket without specifying assignee - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid)], "action": "assign"}, - errors={"non_field_errors": 'For action "assign" you must specify the assignee'}, - ) - - # try to add a note without specifying note - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid)], "action": "add_note"}, - errors={"non_field_errors": 'For action "add_note" you must specify the note'}, - ) - - # try to change topic without specifying topic - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid)], "action": "change_topic"}, - errors={"non_field_errors": 'For action "change_topic" you must specify the topic'}, - ) - - # assign valid tickets to a user - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "assign", "assignee": "agent@nyaruka.com"}, - status=204, - ) - - ticket1.refresh_from_db() - ticket2.refresh_from_db() - self.assertEqual(self.agent, ticket1.assignee) - self.assertEqual(self.agent, ticket2.assignee) - - # unassign tickets - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid)], "action": "assign", "assignee": None}, - status=204, - ) - - ticket1.refresh_from_db() - self.assertIsNone(ticket1.assignee) - - # add a note to tickets - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "add_note", "note": "Looks important"}, - status=204, - ) - - self.assertEqual("Looks important", ticket1.events.last().note) - self.assertEqual("Looks important", ticket2.events.last().note) - - # change topic of tickets - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "change_topic", "topic": str(sales.uuid)}, - status=204, - ) - - ticket1.refresh_from_db() - ticket2.refresh_from_db() - self.assertEqual(sales, ticket1.topic) - self.assertEqual(sales, ticket2.topic) - - # close tickets - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "close"}, - status=204, - ) - - ticket1.refresh_from_db() - ticket2.refresh_from_db() - self.assertEqual("C", ticket1.status) - self.assertEqual("C", ticket2.status) - - # and finally reopen them - self.assertPost( - endpoint_url, - self.agent, - {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "reopen"}, - status=204, - ) - - ticket1.refresh_from_db() - ticket2.refresh_from_db() - self.assertEqual("O", ticket1.status) - self.assertEqual("O", ticket2.status) - - def test_topics(self): - endpoint_url = reverse("api.v2.topics") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user]) - self.assertDeleteNotAllowed(endpoint_url) - - # create some topics - support = Topic.create(self.org, 
self.admin, "Support") - sales = Topic.create(self.org, self.admin, "Sales") - other_org = Topic.create(self.org2, self.admin, "Bugs") - - contact = self.create_contact("Ann", phone="+1234567890") - self.create_ticket(contact, topic=support) - - # no filtering - self.assertGet( - endpoint_url, - [self.user, self.editor], - results=[ - { - "uuid": str(sales.uuid), - "name": "Sales", - "counts": {"open": 0, "closed": 0}, - "system": False, - "created_on": format_datetime(sales.created_on), - }, - { - "uuid": str(support.uuid), - "name": "Support", - "counts": {"open": 1, "closed": 0}, - "system": False, - "created_on": format_datetime(support.created_on), - }, - { - "uuid": str(self.org.default_ticket_topic.uuid), - "name": "General", - "counts": {"open": 0, "closed": 0}, - "system": True, - "created_on": format_datetime(self.org.default_ticket_topic.created_on), - }, - ], - num_queries=NUM_BASE_SESSION_QUERIES + 3, - ) - - # try to create empty topic - response = self.assertPost(endpoint_url, self.editor, {}, errors={"name": "This field is required."}) - - # create new topic - response = self.assertPost(endpoint_url, self.editor, {"name": "Food"}, status=201) - - food = Topic.objects.get(name="Food") - self.assertEqual( - response.json(), - { - "uuid": str(food.uuid), - "name": "Food", - "counts": {"open": 0, "closed": 0}, - "system": False, - "created_on": matchers.ISODate(), - }, - ) - - # try to create another topic with same name - self.assertPost(endpoint_url, self.editor, {"name": "Food"}, errors={"name": "This field must be unique."}) - - # it's fine if a topic in another org has that name - self.assertPost(endpoint_url, self.editor, {"name": "Bugs"}, status=201) - - # try to create a topic with invalid name - self.assertPost(endpoint_url, self.editor, {"name": '"Hi"'}, errors={"name": 'Cannot contain the character: "'}) - - # try to create a topic with name that's too long - self.assertPost( - endpoint_url, - self.editor, - {"name": "x" * 65}, - errors={"name": "Ensure this field has no more than 64 characters."}, - ) - - # update topic by UUID - self.assertPost(endpoint_url + f"?uuid={support.uuid}", self.admin, {"name": "Support Tickets"}) - - support.refresh_from_db() - self.assertEqual(support.name, "Support Tickets") - - # can't update default topic for an org - self.assertPost( - endpoint_url + f"?uuid={self.org.default_ticket_topic.uuid}", - self.admin, - {"name": "Won't work"}, - errors={None: "Cannot modify system object."}, - status=403, - ) - - # can't update topic from other org - self.assertPost(endpoint_url + f"?uuid={other_org.uuid}", self.admin, {"name": "Won't work"}, status=404) - - # can't update topic to same name as existing topic - self.assertPost( - endpoint_url + f"?uuid={support.uuid}", - self.admin, - {"name": "General"}, - errors={"name": "This field must be unique."}, - ) - - # try creating a new topic after reaching the limit - current_count = self.org.topics.filter(is_system=False, is_active=True).count() - with override_settings(ORG_LIMIT_DEFAULTS={"topics": current_count}): - response = self.assertPost( - endpoint_url, - self.admin, - {"name": "Interesting"}, - errors={None: "Cannot create object because workspace has reached limit of 4."}, - status=409, - ) - - def test_users(self): - endpoint_url = reverse("api.v2.users") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotAllowed(endpoint_url) - self.assertDeleteNotAllowed(endpoint_url) - - self.assertGet( - endpoint_url, - [self.agent, self.user, self.editor, 
self.admin], - results=[ - { - "avatar": None, - "email": "agent@nyaruka.com", - "first_name": "Agnes", - "last_name": "", - "role": "agent", - "created_on": format_datetime(self.agent.date_joined), - }, - { - "avatar": None, - "email": "viewer@nyaruka.com", - "first_name": "", - "last_name": "", - "role": "viewer", - "created_on": format_datetime(self.user.date_joined), - }, - { - "avatar": None, - "email": "editor@nyaruka.com", - "first_name": "Ed", - "last_name": "McEdits", - "role": "editor", - "created_on": format_datetime(self.editor.date_joined), - }, - { - "avatar": None, - "email": "admin@nyaruka.com", - "first_name": "Andy", - "last_name": "", - "role": "administrator", - "created_on": format_datetime(self.admin.date_joined), - }, - ], - # one query per user for their settings - num_queries=NUM_BASE_SESSION_QUERIES + 3, - ) - - # filter by roles - self.assertGet(endpoint_url + "?role=agent&role=editor", [self.editor], results=[self.agent, self.editor]) - - # non-existent roles ignored - self.assertGet(endpoint_url + "?role=caretaker&role=editor", [self.editor], results=[self.editor]) - - def test_workspace(self): - endpoint_url = reverse("api.v2.workspace") + ".json" - - self.assertGetNotPermitted(endpoint_url, [None]) - self.assertPostNotAllowed(endpoint_url) - self.assertDeleteNotAllowed(endpoint_url) - - # no filtering options.. just gets the current org - self.assertGet( - endpoint_url, - [self.agent, self.user, self.editor, self.admin], - raw={ - "uuid": str(self.org.uuid), - "name": "Nyaruka", - "country": "RW", - "languages": ["eng", "kin"], - "primary_language": "eng", - "timezone": "Africa/Kigali", - "date_style": "day_first", - "credits": {"used": -1, "remaining": -1}, - "anon": False, - }, - ) - - self.org.set_flow_languages(self.admin, ["kin"]) - - self.assertGet( - endpoint_url, - [self.agent], - raw={ - "uuid": str(self.org.uuid), - "name": "Nyaruka", - "country": "RW", - "languages": ["kin"], - "primary_language": "kin", - "timezone": "Africa/Kigali", - "date_style": "day_first", - "credits": {"used": -1, "remaining": -1}, - "anon": False, - }, - ) diff --git a/temba/api/v2/tests/__init__.py b/temba/api/v2/tests/__init__.py new file mode 100644 index 00000000000..070ad060533 --- /dev/null +++ b/temba/api/v2/tests/__init__.py @@ -0,0 +1,33 @@ +from django.urls import reverse + +from temba.api.tests.mixins import APITestMixin +from temba.msgs.models import Media +from temba.tests import TembaTest + + +class APITest(APITestMixin, TembaTest): + BASE_SESSION_QUERIES = 4 # number of queries required for any request using session auth + BASE_TOKEN_QUERIES = 2 # number of queries required for any request using token auth + + def upload_media(self, user, filename: str): + self.login(user) + + with open(filename, "rb") as data: + response = self.client.post( + reverse("api.v2.media") + ".json", {"file": data}, HTTP_X_FORWARDED_HTTPS="https" + ) + self.assertEqual(201, response.status_code) + + return Media.objects.get(uuid=response.json()["uuid"]) + + def assertResultsById(self, response, expected): + self.assertEqual(response.status_code, 200) + self.assertEqual([r["id"] for r in response.json()["results"]], [o.pk for o in expected]) + + def assertResultsByUUID(self, response, expected): + self.assertEqual(response.status_code, 200) + self.assertEqual([r["uuid"] for r in response.json()["results"]], [str(o.uuid) for o in expected]) + + def assert404(self, response): + self.assertEqual(response.status_code, 404) + self.assertEqual(response.json(), {"detail": "Not found."}) 
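The num_queries=self.BASE_SESSION_QUERIES + n assertions used throughout these new test modules compare the SQL statements issued per request against the fixed per-auth baselines declared above (BASE_SESSION_QUERIES for session auth, BASE_TOKEN_QUERIES for token auth). The assertGet/assertPost helpers themselves come from APITestMixin, which this diff doesn't include; as a minimal sketch of how such a query-count check can be built on Django's CaptureQueriesContext (the helper name and shape below are illustrative, not the mixin's actual code):

    from django.db import connection
    from django.test.utils import CaptureQueriesContext

    def get_with_expected_queries(client, url, expected: int):
        # capture every SQL statement executed while the test client serves this request
        with CaptureQueriesContext(connection) as ctx:
            response = client.get(url)
        # a count regression here usually points at a missing select_related/prefetch_related
        assert len(ctx.captured_queries) == expected, (
            f"expected {expected} queries, got {len(ctx.captured_queries)}"
        )
        return response

Pinning counts to a shared per-auth baseline keeps individual assertions stable: when the session bootstrap cost changes, only the base constant has to move.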
diff --git a/temba/api/v2/tests/test_archives.py b/temba/api/v2/tests/test_archives.py new file mode 100644 index 00000000000..5674a98f4ff --- /dev/null +++ b/temba/api/v2/tests/test_archives.py @@ -0,0 +1,146 @@ +from datetime import datetime + +from django.urls import reverse + +from temba.archives.models import Archive + +from . import APITest + + +class ArchivesEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.archives") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + # create some archives + Archive.objects.create( + org=self.org, + start_date=datetime(2017, 4, 5), + build_time=12, + record_count=34, + size=345, + hash="c4ca4238a0b923820dcc509a6f75849b", + archive_type=Archive.TYPE_MSG, + period=Archive.PERIOD_DAILY, + ) + archive2 = Archive.objects.create( + org=self.org, + start_date=datetime(2017, 5, 5), + build_time=12, + record_count=34, + size=345, + hash="c81e728d9d4c2f636f067f89cc14862c", + archive_type=Archive.TYPE_MSG, + period=Archive.PERIOD_MONTHLY, + ) + archive3 = Archive.objects.create( + org=self.org, + start_date=datetime(2017, 6, 5), + build_time=12, + record_count=34, + size=345, + hash="eccbc87e4b5ce2fe28308fd9f2a7baf3", + archive_type=Archive.TYPE_FLOWRUN, + period=Archive.PERIOD_DAILY, + ) + archive4 = Archive.objects.create( + org=self.org, + start_date=datetime(2017, 7, 5), + build_time=12, + record_count=34, + size=345, + hash="a87ff679a2f3e71d9181a67b7542122c", + archive_type=Archive.TYPE_FLOWRUN, + period=Archive.PERIOD_MONTHLY, + ) + # this archive has been rolled up and it should not be included in the API responses + Archive.objects.create( + org=self.org, + start_date=datetime(2017, 5, 1), + build_time=12, + record_count=34, + size=345, + hash="e4da3b7fbbce2345d7772b0674a318d5", + archive_type=Archive.TYPE_FLOWRUN, + period=Archive.PERIOD_DAILY, + rollup=archive2, + ) + + # create archive for other org + Archive.objects.create( + org=self.org2, + start_date=datetime(2017, 5, 1), + build_time=12, + record_count=34, + size=345, + hash="1679091c5a880faf6fb5e6087eb1b2dc", + archive_type=Archive.TYPE_FLOWRUN, + period=Archive.PERIOD_DAILY, + ) + + # there should be 4 archives in the response, because one has been rolled up + self.assertGet( + endpoint_url, + [self.editor], + results=[ + { + "archive_type": "run", + "download_url": "", + "hash": "a87ff679a2f3e71d9181a67b7542122c", + "period": "monthly", + "record_count": 34, + "size": 345, + "start_date": "2017-07-05", + }, + { + "archive_type": "run", + "download_url": "", + "hash": "eccbc87e4b5ce2fe28308fd9f2a7baf3", + "period": "daily", + "record_count": 34, + "size": 345, + "start_date": "2017-06-05", + }, + { + "archive_type": "message", + "download_url": "", + "hash": "c81e728d9d4c2f636f067f89cc14862c", + "period": "monthly", + "record_count": 34, + "size": 345, + "start_date": "2017-05-05", + }, + { + "archive_type": "message", + "download_url": "", + "hash": "c4ca4238a0b923820dcc509a6f75849b", + "period": "daily", + "record_count": 34, + "size": 345, + "start_date": "2017-04-05", + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + self.assertGet(endpoint_url + "?after=2017-05-01", [self.editor], results=[archive4, archive3, archive2]) + self.assertGet(endpoint_url + "?after=2017-05-01&archive_type=run", [self.editor], results=[archive4, archive3]) + + # unknown archive type + self.assertGet(endpoint_url + "?archive_type=invalid", 
[self.editor], results=[]) + + # only for dailies + self.assertGet( + endpoint_url + "?after=2017-05-01&archive_type=run&period=daily", [self.editor], results=[archive3] + ) + + # only for monthlies + self.assertGet(endpoint_url + "?period=monthly", [self.editor], results=[archive4, archive2]) + + # test access from a user with no org + self.login(self.non_org_user) + response = self.client.get(endpoint_url) + self.assertEqual(403, response.status_code) diff --git a/temba/api/v2/tests/test_base.py b/temba/api/v2/tests/test_base.py new file mode 100644 index 00000000000..b229562102c --- /dev/null +++ b/temba/api/v2/tests/test_base.py @@ -0,0 +1,322 @@ +import base64 +import time +from collections import OrderedDict +from datetime import datetime, timezone as tzone +from decimal import Decimal +from unittest.mock import patch + +from django.core.cache import cache +from django.test import override_settings +from django.urls import reverse +from django.utils import timezone + +from temba.api.models import APIToken +from temba.api.v2.serializers import normalize_extra +from temba.contacts.models import Contact +from temba.flows.models import FlowRun +from temba.orgs.models import OrgRole + +from . import APITest + + +class EndpointsTest(APITest): + def setUp(self): + super().setUp() + + self.joe = self.create_contact("Joe Blow", phone="+250788123123") + self.frank = self.create_contact("Frank", urns=["facebook:123456"]) + + self.facebook_channel = self.create_channel("FBA", "Facebook Channel", "billy_bob") + + self.hans = self.create_contact("Hans Gruber", phone="+4921551511", org=self.org2) + + self.org2channel = self.create_channel("A", "Org2Channel", "123456", country="RW", org=self.org2) + + @override_settings(REST_HANDLE_EXCEPTIONS=True) + @patch("temba.api.v2.views.FieldsEndpoint.get_queryset") + def test_error_handling(self, mock_get_queryset): + mock_get_queryset.side_effect = ValueError("DOH!") + + self.login(self.admin) + + response = self.client.get( + reverse("api.v2.fields") + ".json", content_type="application/json", HTTP_X_FORWARDED_HTTPS="https" + ) + self.assertContains(response, "Server Error. 
Site administrators have been notified.", status_code=500) + + @override_settings(FLOW_START_PARAMS_SIZE=4) + def test_normalize_extra(self): + self.assertEqual(OrderedDict(), normalize_extra({})) + self.assertEqual( + OrderedDict([("0", "a"), ("1", True), ("2", Decimal("1.0")), ("3", "")]), + normalize_extra(["a", True, Decimal("1.0"), None]), + ) + self.assertEqual(OrderedDict([("_3__x", "z")]), normalize_extra({"%3 !x": "z"})) + self.assertEqual( + OrderedDict([("0", "a"), ("1", "b"), ("2", "c"), ("3", "d")]), normalize_extra(["a", "b", "c", "d", "e"]) + ) + self.assertEqual( + OrderedDict([("a", 1), ("b", 2), ("c", 3), ("d", 4)]), + normalize_extra({"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}), + ) + self.assertEqual(OrderedDict([("a", "x" * 640)]), normalize_extra({"a": "x" * 641})) + + def test_authentication(self): + def _request(endpoint, post_data, **kwargs): + if post_data: + return self.client.post(endpoint, post_data, content_type="application/json", secure=True, **kwargs) + else: + return self.client.get(endpoint, secure=True, **kwargs) + + def request_by_token(endpoint, token, post_data=None): + return _request(endpoint, post_data, HTTP_AUTHORIZATION=f"Token {token}") + + def request_by_basic_auth(endpoint, username, password, post_data=None): + credentials_base64 = base64.b64encode(f"{username}:{password}".encode()).decode() + return _request(endpoint, post_data, HTTP_AUTHORIZATION=f"Basic {credentials_base64}") + + def request_by_session(endpoint, user, post_data=None): + self.login(user, choose_org=self.org) + resp = _request(endpoint, post_data) + self.client.logout() + return resp + + contacts_url = reverse("api.v2.contacts") + ".json" + campaigns_url = reverse("api.v2.campaigns") + ".json" + fields_url = reverse("api.v2.fields") + ".json" + + token1 = APIToken.create(self.org, self.admin) + token2 = APIToken.create(self.org, self.editor) + + # can GET fields endpoint using both tokens + response = request_by_token(fields_url, token1.key) + self.assertEqual(200, response.status_code) + response = request_by_token(fields_url, token2.key) + self.assertEqual(200, response.status_code) + + # can POST with both tokens + response = request_by_token(fields_url, token1.key, {"name": "Field 1", "type": "text"}) + self.assertEqual(201, response.status_code) + response = request_by_token(fields_url, token2.key, {"name": "Field 2", "type": "text"}) + self.assertEqual(201, response.status_code) + + response = request_by_basic_auth(fields_url, self.admin.username, token1.key) + self.assertEqual(200, response.status_code) + + # can GET using session auth for admins, editors and servicing staff + response = request_by_session(fields_url, self.admin) + self.assertEqual(200, response.status_code) + response = request_by_session(fields_url, self.editor) + self.assertEqual(200, response.status_code) + response = request_by_session(fields_url, self.customer_support) + self.assertEqual(200, response.status_code) + + # can POST using session auth for admins and editors + response = request_by_session(fields_url, self.admin, {"name": "Field 4", "type": "text"}) + self.assertEqual(201, response.status_code) + response = request_by_session(fields_url, self.editor, {"name": "Field 5", "type": "text"}) + self.assertEqual(201, response.status_code) + response = request_by_session(fields_url, self.customer_support, {"name": "Field 6", "type": "text"}) + self.assertEqual(403, response.status_code) + + # if a staff user is actually a member of the org, they can POST + 
self.org.add_user(self.customer_support, OrgRole.ADMINISTRATOR) + response = request_by_session(fields_url, self.customer_support, {"name": "Field 6", "type": "text"}) + self.assertEqual(201, response.status_code) + + # can't fetch endpoint with invalid token + response = request_by_token(contacts_url, "1234567890") + self.assertResponseError(response, None, "Invalid token", status_code=403) + + # can't fetch endpoint with invalid token + response = request_by_basic_auth(contacts_url, self.admin.username, "1234567890") + self.assertResponseError(response, None, "Invalid token or email", status_code=403) + + # can't fetch endpoint with invalid username + response = request_by_basic_auth(contacts_url, "some@name.com", token1.key) + self.assertResponseError(response, None, "Invalid token or email", status_code=403) + + # can fetch campaigns endpoint with valid admin token + response = request_by_token(campaigns_url, token1.key) + self.assertEqual(200, response.status_code) + self.assertEqual(str(self.org.id), response["X-Temba-Org"]) + + response = request_by_basic_auth(contacts_url, self.editor.username, token2.key) + self.assertEqual(200, response.status_code) + self.assertEqual(str(self.org.id), response["X-Temba-Org"]) + + # simulate the admin user exceeding the rate limit for the v2 scope + cache.set(f"throttle_v2_{self.org.id}", [time.time() for r in range(10000)]) + + # next request they make using a token will be rejected + response = request_by_token(fields_url, token1.key) + self.assertEqual(response.status_code, 429) + + # same with basic auth + response = request_by_basic_auth(fields_url, self.admin.username, token1.key) + self.assertEqual(response.status_code, 429) + + # or if another user in same org makes a request + response = request_by_token(fields_url, token2.key) + self.assertEqual(response.status_code, 429) + + # but they can still make a request if they have a session + response = request_by_session(fields_url, self.admin) + self.assertEqual(response.status_code, 200) + + # are allowed to access if we have not reached the configured org api rates + self.org.api_rates = {"v2": "15000/hour"} + self.org.save(update_fields=("api_rates",)) + + response = request_by_basic_auth(fields_url, self.admin.username, token1.key) + self.assertEqual(response.status_code, 200) + + cache.set(f"throttle_v2_{self.org.id}", [time.time() for r in range(15000)]) + + # next request they make using a token will be rejected + response = request_by_token(fields_url, token1.key) + self.assertEqual(response.status_code, 429) + + # if user is demoted to a role that can't use tokens, tokens shouldn't work for them + self.org.add_user(self.admin, OrgRole.VIEWER) + + self.assertEqual(request_by_token(campaigns_url, token1.key).status_code, 403) + self.assertEqual(request_by_basic_auth(campaigns_url, self.admin.username, token1.key).status_code, 403) + + # and if user is inactive, disallow the request + self.org.add_user(self.admin, OrgRole.ADMINISTRATOR) + self.admin.is_active = False + self.admin.save() + + response = request_by_token(contacts_url, token1.key) + self.assertResponseError(response, None, "Invalid token", status_code=403) + + response = request_by_basic_auth(contacts_url, self.admin.username, token1.key) + self.assertResponseError(response, None, "Invalid token or email", status_code=403) + + @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_HTTPS", "https")) + def test_root(self): + root_url = reverse("api.v2.root") + + # browse as HTML anonymously (should still show docs) + 
response = self.client.get(root_url) + self.assertContains(response, "We provide a RESTful JSON API") + + # POSTing just returns the docs with a 405 + response = self.client.post(root_url, {}) + self.assertContains(response, "We provide a RESTful JSON API", status_code=405) + + # same thing if user navigates to just /api + response = self.client.get(reverse("api"), follow=True) + self.assertContains(response, "We provide a RESTful JSON API") + + # try to browse as JSON anonymously + response = self.client.get(root_url + ".json") + self.assertEqual(200, response.status_code) + self.assertIsInstance(response.json(), dict) + self.assertEqual(response.json()["runs"], "http://testserver/api/v2/runs") # endpoints are listed + + def test_docs(self): + messages_url = reverse("api.v2.messages") + + # test fetching docs anonymously + response = self.client.get(messages_url) + self.assertContains(response, "This endpoint allows you to list messages in your account.") + + # you can also POST to docs endpoints though it just returns the docs with a 403 + response = self.client.post(messages_url, {}) + self.assertContains(response, "This endpoint allows you to list messages in your account.", status_code=403) + + # test fetching docs logged in + self.login(self.editor) + response = self.client.get(messages_url) + self.assertContains(response, "This endpoint allows you to list messages in your account.") + + def test_explorer(self): + explorer_url = reverse("api.v2.explorer") + + response = self.client.get(explorer_url) + self.assertLoginRedirect(response) + + # viewers can't access + self.login(self.user) + response = self.client.get(explorer_url) + self.assertLoginRedirect(response) + + # editors and administrators can + self.login(self.editor) + response = self.client.get(explorer_url) + self.assertEqual(200, response.status_code) + + self.login(self.admin) + + response = self.client.get(explorer_url) + self.assertContains(response, "All operations work against real data in the Nyaruka workspace.") + + def test_pagination(self): + endpoint_url = reverse("api.v2.runs") + ".json" + self.login(self.admin) + + # create 1255 test runs (5 full pages of 250 items + 1 partial with 5 items) + flow = self.create_flow("Test") + runs = [] + for r in range(1255): + runs.append(FlowRun(org=self.org, flow=flow, contact=self.joe, status="C", exited_on=timezone.now())) + FlowRun.objects.bulk_create(runs) + actual_ids = list(FlowRun.objects.order_by("-pk").values_list("pk", flat=True)) + + # give them all the same modified_on + FlowRun.objects.all().update(modified_on=datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)) + + returned_ids = [] + + # fetch all full pages + with self.mockReadOnly(): + resp_json = None + for p in range(5): + response = self.client.get( + endpoint_url if p == 0 else resp_json["next"], content_type="application/json" + ) + self.assertEqual(200, response.status_code) + + resp_json = response.json() + + self.assertEqual(len(resp_json["results"]), 250) + self.assertIsNotNone(resp_json["next"]) + + returned_ids += [r["id"] for r in response.json()["results"]] + + # fetch final partial page + with self.mockReadOnly(): + response = self.client.get(resp_json["next"], content_type="application/json") + + resp_json = response.json() + self.assertEqual(len(resp_json["results"]), 5) + self.assertIsNone(resp_json["next"]) + + returned_ids += [r["id"] for r in response.json()["results"]] + + self.assertEqual(returned_ids, actual_ids) # ensure all results were returned and in the correct order + + 
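test_pagination above exercises the API's cursor pagination contract: each full page carries 250 results plus a next URL, and the final page returns "next": null. A client consumes the endpoint by following next until it is exhausted; a minimal sketch of that loop, assuming the requests library plus a placeholder host and API token (the Token <key> authorization scheme is the one exercised in test_authentication):

    import requests

    url = "https://example.com/api/v2/runs.json"  # placeholder host
    headers = {"Authorization": "Token YOUR_API_TOKEN"}  # placeholder token

    run_ids = []
    while url:
        page = requests.get(url, headers=headers).json()
        run_ids += [r["id"] for r in page["results"]]  # up to 250 runs per page
        url = page["next"]  # null on the last page, which ends the loop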
@patch("temba.flows.models.FlowStart.create") + def test_transactions(self, mock_flowstart_create): + """ + Serializer writes are wrapped in a transaction. This test simulates FlowStart.create blowing up and checks that + contacts aren't created. + """ + mock_flowstart_create.side_effect = ValueError("DOH!") + + flow = self.create_flow("Test") + + try: + self.assertPost( + reverse("api.v2.flow_starts") + ".json", + self.admin, + {"flow": str(flow.uuid), "urns": ["tel:+12067791212"]}, + status=201, + ) + self.fail() # ensure exception is thrown + except ValueError: + pass + + self.assertFalse(Contact.objects.filter(urns__path="+12067791212")) diff --git a/temba/api/v2/tests/test_boundaries.py b/temba/api/v2/tests/test_boundaries.py new file mode 100644 index 00000000000..4dde0e2caa1 --- /dev/null +++ b/temba/api/v2/tests/test_boundaries.py @@ -0,0 +1,142 @@ +from django.contrib.gis.geos import GEOSGeometry +from django.urls import reverse + +from temba.locations.models import BoundaryAlias +from temba.tests import matchers + +from . import APITest + + +class BoundariesEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.boundaries") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + self.setUpLocations() + + BoundaryAlias.create(self.org, self.admin, self.state1, "Kigali") + BoundaryAlias.create(self.org, self.admin, self.state2, "East Prov") + BoundaryAlias.create(self.org2, self.admin2, self.state1, "Other Org") # shouldn't be returned + + self.state1.simplified_geometry = GEOSGeometry("MULTIPOLYGON(((1 1, 1 -1, -1 -1, -1 1, 1 1)))") + self.state1.save() + + # test without geometry + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[ + { + "osm_id": "1708283", + "name": "Kigali City", + "parent": {"osm_id": "171496", "name": "Rwanda"}, + "level": 1, + "aliases": ["Kigali", "Kigari"], + "geometry": None, + }, + { + "osm_id": "171113181", + "name": "Kageyo", + "parent": {"osm_id": "R1711131", "name": "Gatsibo"}, + "level": 3, + "aliases": [], + "geometry": None, + }, + { + "osm_id": "1711142", + "name": "Rwamagana", + "parent": {"osm_id": "171591", "name": "Eastern Province"}, + "level": 2, + "aliases": [], + "geometry": None, + }, + { + "osm_id": "1711163", + "name": "Kay\u00f4nza", + "parent": {"osm_id": "171591", "name": "Eastern Province"}, + "level": 2, + "aliases": [], + "geometry": None, + }, + { + "osm_id": "171116381", + "name": "Kabare", + "parent": {"osm_id": "1711163", "name": "Kay\u00f4nza"}, + "level": 3, + "aliases": [], + "geometry": None, + }, + {"osm_id": "171496", "name": "Rwanda", "parent": None, "level": 0, "aliases": [], "geometry": None}, + { + "osm_id": "171591", + "name": "Eastern Province", + "parent": {"osm_id": "171496", "name": "Rwanda"}, + "level": 1, + "aliases": ["East Prov"], + "geometry": None, + }, + { + "osm_id": "3963734", + "name": "Nyarugenge", + "parent": {"osm_id": "1708283", "name": "Kigali City"}, + "level": 2, + "aliases": [], + "geometry": None, + }, + { + "osm_id": "R1711131", + "name": "Gatsibo", + "parent": {"osm_id": "171591", "name": "Eastern Province"}, + "level": 2, + "aliases": [], + "geometry": None, + }, + { + "osm_id": "VMN.49.1_1", + "name": "Bukure", + "parent": {"osm_id": "1711142", "name": "Rwamagana"}, + "level": 3, + "aliases": [], + "geometry": None, + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 3, + ) + + # test with geometry + self.assertGet( + endpoint_url + 
"?geometry=true", + [self.admin], + results=[ + { + "osm_id": "1708283", + "name": "Kigali City", + "parent": {"osm_id": "171496", "name": "Rwanda"}, + "level": 1, + "aliases": ["Kigali", "Kigari"], + "geometry": { + "type": "MultiPolygon", + "coordinates": [[[[1.0, 1.0], [1.0, -1.0], [-1.0, -1.0], [-1.0, 1.0], [1.0, 1.0]]]], + }, + }, + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + matchers.Dict(), + ], + num_queries=self.BASE_SESSION_QUERIES + 3, + ) + + # if org doesn't have a country, just return no results + self.org.country = None + self.org.save(update_fields=("country",)) + + self.assertGet(endpoint_url, [self.admin], results=[]) diff --git a/temba/api/v2/tests/test_broadcasts.py b/temba/api/v2/tests/test_broadcasts.py new file mode 100644 index 00000000000..ae7b67f7a47 --- /dev/null +++ b/temba/api/v2/tests/test_broadcasts.py @@ -0,0 +1,250 @@ +from django.conf import settings +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.msgs.models import Broadcast +from temba.orgs.models import Org +from temba.schedules.models import Schedule +from temba.tests import mock_mailroom + +from . import APITest + + +class BroadcastsEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.broadcasts") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotAllowed(endpoint_url) + + self.create_channel("FBA", "Facebook Channel", "billy_bob") + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["facebook:123456"]) + reporters = self.create_group("Reporters", [joe, frank]) + + hans = self.create_contact("Hans Gruber", phone="+4921551511", org=self.org2) + self.create_channel("A", "Org2Channel", "123456", country="RW", org=self.org2) + + bcast1 = self.create_broadcast( + self.admin, {"eng": {"text": "Hello 1"}}, urns=["facebook:12345"], status=Broadcast.STATUS_PENDING + ) + bcast2 = self.create_broadcast( + self.admin, {"eng": {"text": "Hello 2"}}, contacts=[joe], status=Broadcast.STATUS_PENDING + ) + bcast3 = self.create_broadcast( + self.admin, {"eng": {"text": "Hello 3"}}, contacts=[frank], status=Broadcast.STATUS_COMPLETED + ) + bcast4 = self.create_broadcast( + self.admin, + {"eng": {"text": "Hello 4"}}, + urns=["facebook:12345"], + contacts=[joe], + groups=[reporters], + status=Broadcast.STATUS_FAILED, + ) + self.create_broadcast( + self.admin, + {"eng": {"text": "Scheduled"}}, + contacts=[joe], + schedule=Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY), + ) + self.create_broadcast(self.admin2, {"eng": {"text": "Different org..."}}, contacts=[hans], org=self.org2) + + # no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[bcast4, bcast3, bcast2, bcast1], + num_queries=self.BASE_SESSION_QUERIES + 4, + ) + resp_json = response.json() + + self.assertEqual( + { + "id": bcast2.id, + "status": "pending", + "progress": {"total": -1, "started": 0}, + "urns": [], + "contacts": [{"uuid": joe.uuid, "name": joe.name}], + "groups": [], + "text": {"eng": "Hello 2"}, + "attachments": {"eng": []}, + "base_language": "eng", + "created_on": format_datetime(bcast2.created_on), + }, + resp_json["results"][2], + ) + self.assertEqual( + 
{ + "id": bcast4.id, + "status": "failed", + "progress": {"total": 2, "started": 2}, + "urns": ["facebook:12345"], + "contacts": [{"uuid": joe.uuid, "name": joe.name}], + "groups": [{"uuid": reporters.uuid, "name": reporters.name}], + "text": {"eng": "Hello 4"}, + "attachments": {"eng": []}, + "base_language": "eng", + "created_on": format_datetime(bcast4.created_on), + }, + resp_json["results"][0], + ) + + # filter by id + self.assertGet(endpoint_url + f"?id={bcast3.id}", [self.editor], results=[bcast3]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(bcast2.created_on)}", [self.editor], results=[bcast2, bcast1] + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(bcast3.created_on)}", [self.editor], results=[bcast4, bcast3] + ) + + with self.anonymous(self.org): + response = self.assertGet(endpoint_url + f"?id={bcast1.id}", [self.editor], results=[bcast1]) + + # URNs shouldn't be included + self.assertIsNone(response.json()["results"][0]["urns"]) + + # try to create new broadcast with no data at all + self.assertPost( + endpoint_url, self.admin, {}, errors={"non_field_errors": "Must provide either text or attachments."} + ) + + # try to create new broadcast with no recipients + self.assertPost( + endpoint_url, + self.admin, + {"text": "Hello"}, + errors={"non_field_errors": "Must provide either urns, contacts or groups."}, + ) + + # try to create new broadcast with invalid group lookup + self.assertPost( + endpoint_url, + self.admin, + {"text": "Hello", "groups": [123456]}, + errors={"groups": "No such object: 123456"}, + ) + + # try to create new broadcast with translations that don't include base language + self.assertPost( + endpoint_url, + self.admin, + {"text": {"kin": "Muraho"}, "base_language": "eng", "contacts": [joe.uuid]}, + errors={"non_field_errors": "No text translation provided in base language."}, + ) + + media1 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") + media2 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/snow.mp4") + + # try to create new broadcast with attachment translations that don't include base language + self.assertPost( + endpoint_url, + self.admin, + { + "text": {"eng": "Hello"}, + "attachments": {"spa": [str(media1.uuid)]}, + "base_language": "eng", + "contacts": [joe.uuid], + }, + errors={"non_field_errors": "No attachment translations provided in base language."}, + ) + + # create new broadcast with all fields + response = self.assertPost( + endpoint_url, + self.admin, + { + "text": {"eng": "Hello @contact.name", "spa": "Hola @contact.name"}, + "attachments": { + "eng": [str(media1.uuid), f"video/mp4:http://example.com/{media2.uuid}.mp4"], + "kin": [str(media2.uuid)], + }, + "base_language": "eng", + "urns": ["facebook:12345"], + "contacts": [joe.uuid, frank.uuid], + "groups": [reporters.uuid], + }, + status=201, + ) + + broadcast = Broadcast.objects.get(id=response.json()["id"]) + self.assertEqual( + { + "eng": { + "text": "Hello @contact.name", + "attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"], + }, + "spa": {"text": "Hola @contact.name"}, + "kin": {"attachments": [f"video/mp4:{media2.url}"]}, + }, + broadcast.translations, + ) + self.assertEqual("eng", broadcast.base_language) + self.assertEqual(["facebook:12345"], broadcast.urns) + self.assertEqual({joe, frank}, set(broadcast.contacts.all())) + self.assertEqual({reporters}, set(broadcast.groups.all())) + + # create new broadcast without translations + response = 
self.assertPost( + endpoint_url, + self.admin, + { + "text": "Hello", + "attachments": [str(media1.uuid), str(media2.uuid)], + "contacts": [joe.uuid, frank.uuid], + }, + status=201, + ) + + broadcast = Broadcast.objects.get(id=response.json()["id"]) + self.assertEqual( + { + "eng": { + "text": "Hello", + "attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"], + } + }, + broadcast.translations, + ) + self.assertEqual("eng", broadcast.base_language) + self.assertEqual({joe, frank}, set(broadcast.contacts.all())) + + # create new broadcast without translations containing only text, no attachments + response = self.assertPost( + endpoint_url, + self.admin, + {"text": "Hello", "contacts": [joe.uuid, frank.uuid]}, + status=201, + ) + + broadcast = Broadcast.objects.get(id=response.json()["id"]) + self.assertEqual({"eng": {"text": "Hello"}}, broadcast.translations) + + # create new broadcast without translations containing only attachments, no text + response = self.assertPost( + endpoint_url, + self.admin, + {"attachments": [str(media1.uuid), str(media2.uuid)], "contacts": [joe.uuid, frank.uuid]}, + status=201, + ) + + broadcast = Broadcast.objects.get(id=response.json()["id"]) + self.assertEqual( + {"eng": {"attachments": [f"image/jpeg:{media1.url}", f"video/mp4:{media2.url}"]}}, + broadcast.translations, + ) + + # try sending as a flagged org + self.org.flag() + self.assertPost( + endpoint_url, + self.admin, + {"text": "Hello", "contacts": [joe.uuid]}, + errors={"non_field_errors": Org.BLOCKER_FLAGGED}, + ) diff --git a/temba/api/v2/tests/test_campaign_events.py b/temba/api/v2/tests/test_campaign_events.py new file mode 100644 index 00000000000..003891fcbe7 --- /dev/null +++ b/temba/api/v2/tests/test_campaign_events.py @@ -0,0 +1,359 @@ +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.campaigns.models import Campaign, CampaignEvent +from temba.contacts.models import ContactField, ContactGroup +from temba.tests import matchers, mock_mailroom + +from . 
import APITest + + +class CampaignEventsEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.campaign_events") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) + + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["facebook:123456"]) + flow = self.create_flow("Test Flow") + reporters = self.create_group("Reporters", [joe, frank]) + registration = self.create_field("registration", "Registration", value_type=ContactField.TYPE_DATETIME) + field_created_on = self.org.fields.get(key="created_on") + + # create our contact and set a registration date + contact = self.create_contact( + "Joe", phone="+12065551515", fields={"registration": self.org.format_datetime(timezone.now())} + ) + reporters.contacts.add(contact) + + campaign1 = Campaign.create(self.org, self.admin, "Reminders", reporters) + event1 = CampaignEvent.create_message_event( + self.org, + self.admin, + campaign1, + registration, + 1, + CampaignEvent.UNIT_DAYS, + "Don't forget to brush your teeth", + ) + + campaign2 = Campaign.create(self.org, self.admin, "Notifications", reporters) + event2 = CampaignEvent.create_flow_event( + self.org, self.admin, campaign2, registration, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 + ) + + campaign3 = Campaign.create(self.org, self.admin, "Alerts", reporters) + event3 = CampaignEvent.create_flow_event( + self.org, self.admin, campaign3, field_created_on, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 + ) + + # create event for another org + joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) + spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") + spam = Campaign.create(self.org2, self.admin2, "Cool stuff", spammers) + CampaignEvent.create_flow_event( + self.org2, self.admin2, spam, joined, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 + ) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[ + { + "uuid": str(event3.uuid), + "campaign": {"uuid": str(campaign3.uuid), "name": "Alerts"}, + "relative_to": {"key": "created_on", "name": "Created On", "label": "Created On"}, + "offset": 6, + "unit": "hours", + "delivery_hour": 12, + "flow": {"uuid": flow.uuid, "name": "Test Flow"}, + "message": None, + "created_on": format_datetime(event3.created_on), + }, + { + "uuid": str(event2.uuid), + "campaign": {"uuid": str(campaign2.uuid), "name": "Notifications"}, + "relative_to": {"key": "registration", "name": "Registration", "label": "Registration"}, + "offset": 6, + "unit": "hours", + "delivery_hour": 12, + "flow": {"uuid": flow.uuid, "name": "Test Flow"}, + "message": None, + "created_on": format_datetime(event2.created_on), + }, + { + "uuid": str(event1.uuid), + "campaign": {"uuid": str(campaign1.uuid), "name": "Reminders"}, + "relative_to": {"key": "registration", "name": "Registration", "label": "Registration"}, + "offset": 1, + "unit": "days", + "delivery_hour": -1, + "flow": None, + "message": {"eng": "Don't forget to brush your teeth"}, + "created_on": format_datetime(event1.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 4, + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={event1.uuid}", [self.editor], results=[event1]) + + # filter by campaign name + 
self.assertGet(endpoint_url + "?campaign=Reminders", [self.editor], results=[event1]) + + # filter by campaign UUID + self.assertGet(endpoint_url + f"?campaign={campaign1.uuid}", [self.editor], results=[event1]) + + # filter by invalid campaign + self.assertGet(endpoint_url + "?campaign=Invalid", [self.editor], results=[]) + + # try to create empty campaign event + self.assertPost( + endpoint_url, + self.editor, + {}, + errors={ + "campaign": "This field is required.", + "relative_to": "This field is required.", + "offset": "This field is required.", + "unit": "This field is required.", + "delivery_hour": "This field is required.", + }, + ) + + # try again with some invalid values + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "epocs", + "delivery_hour": 25, + "message": {"kin": "Muraho"}, + }, + errors={ + "unit": '"epocs" is not a valid choice.', + "delivery_hour": "Ensure this value is less than or equal to 23.", + "message": "Message text in default flow language is required.", + }, + ) + + # provide valid values for those fields.. but not a message or flow + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + }, + errors={ + "non_field_errors": "Flow or a message text required.", + }, + ) + + # create a message event + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": "You are @fields.age", + }, + status=201, + ) + + event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + self.assertEqual(event1.event_type, CampaignEvent.TYPE_MESSAGE) + self.assertEqual(event1.relative_to, registration) + self.assertEqual(event1.offset, 15) + self.assertEqual(event1.unit, "W") + self.assertEqual(event1.delivery_hour, -1) + self.assertEqual(event1.message, {"eng": "You are @fields.age"}) + self.assertIsNotNone(event1.flow) + + # try to create a message event with an empty message + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": "", + }, + errors={("message", "eng"): "This field may not be blank."}, + ) + + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "created_on", + "offset": 15, + "unit": "days", + "delivery_hour": -1, + "message": "Nice unit of work @fields.code", + }, + status=201, + ) + + event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + self.assertEqual(event1.event_type, CampaignEvent.TYPE_MESSAGE) + self.assertEqual(event1.relative_to, field_created_on) + self.assertEqual(event1.offset, 15) + self.assertEqual(event1.unit, "D") + self.assertEqual(event1.delivery_hour, -1) + self.assertEqual(event1.message, {"eng": "Nice unit of work @fields.code"}) + self.assertIsNotNone(event1.flow) + + # create a flow event + self.assertPost( + endpoint_url, + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "flow": str(flow.uuid), + }, + status=201, + ) + + event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + self.assertEqual(event2.event_type, 
CampaignEvent.TYPE_FLOW) + self.assertEqual(event2.relative_to, registration) + self.assertEqual(event2.offset, 15) + self.assertEqual(event2.unit, "W") + self.assertEqual(event2.delivery_hour, -1) + self.assertEqual(event2.message, None) + self.assertEqual(event2.flow, flow) + + # make sure we queued a mailroom task to schedule this event + self.assertEqual( + { + "org_id": self.org.id, + "type": "schedule_campaign_event", + "queued_on": matchers.Datetime(), + "task": {"campaign_event_id": event2.id, "org_id": self.org.id}, + }, + mr_mocks.queued_batch_tasks[-1], + ) + + # update the message event to be a flow event + self.assertPost( + endpoint_url + f"?uuid={event1.uuid}", + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "flow": str(flow.uuid), + }, + ) + + event1 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + + self.assertEqual(event1.event_type, CampaignEvent.TYPE_FLOW) + self.assertIsNone(event1.message) + self.assertEqual(event1.flow, flow) + + # and update the flow event to be a message event + self.assertPost( + endpoint_url + f"?uuid={event2.uuid}", + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": {"eng": "OK @(format_urn(urns.tel))", "fra": "D'accord"}, + }, + ) + + event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + self.assertEqual(event2.event_type, CampaignEvent.TYPE_MESSAGE) + self.assertEqual(event2.message, {"eng": "OK @(format_urn(urns.tel))", "fra": "D'accord"}) + + # and update its message again + self.assertPost( + endpoint_url + f"?uuid={event2.uuid}", + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"}, + }, + ) + + event2 = CampaignEvent.objects.filter(campaign=campaign1).order_by("-id").first() + self.assertEqual(event2.event_type, CampaignEvent.TYPE_MESSAGE) + self.assertEqual(event2.message, {"eng": "OK", "fra": "D'accord", "kin": "Sawa"}) + + # try to change an existing event's campaign + self.assertPost( + endpoint_url + f"?uuid={event1.uuid}", + self.editor, + { + "campaign": str(campaign2.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"}, + }, + errors={"campaign": "Cannot change campaign for existing events"}, + ) + + # try an empty delete request + self.assertDelete( + endpoint_url, self.editor, errors={None: "URL must contain one of the following parameters: uuid"} + ) + + # delete an event by UUID + self.assertDelete(endpoint_url + f"?uuid={event1.uuid}", self.editor) + + self.assertFalse(CampaignEvent.objects.filter(id=event1.id, is_active=True).exists()) + + # can't make changes to events on archived campaigns + campaign1.archive(self.admin) + + self.assertPost( + endpoint_url + f"?uuid={event2.uuid}", + self.editor, + { + "campaign": str(campaign1.uuid), + "relative_to": "registration", + "offset": 15, + "unit": "weeks", + "delivery_hour": -1, + "message": {"eng": "OK", "fra": "D'accord", "kin": "Sawa"}, + }, + errors={"campaign": f"No such object: {campaign1.uuid}"}, + ) diff --git a/temba/api/v2/tests/test_campaigns.py b/temba/api/v2/tests/test_campaigns.py new file mode 100644 index 00000000000..6d1ee41ea6a --- 
/dev/null +++ b/temba/api/v2/tests/test_campaigns.py @@ -0,0 +1,134 @@ +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.campaigns.models import Campaign +from temba.contacts.models import ContactGroup + +from . import APITest + + +class CampaignsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.campaigns") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotAllowed(endpoint_url) + + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["facebook:123456"]) + reporters = self.create_group("Reporters", [joe, frank]) + other_group = self.create_group("Others", []) + campaign1 = Campaign.create(self.org, self.admin, "Reminders #1", reporters) + campaign2 = Campaign.create(self.org, self.admin, "Reminders #2", reporters) + + # create campaign for other org + spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") + spam = Campaign.create(self.org2, self.admin2, "Spam", spammers) + + # no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[ + { + "uuid": str(campaign2.uuid), + "name": "Reminders #2", + "archived": False, + "group": {"uuid": reporters.uuid, "name": "Reporters"}, + "created_on": format_datetime(campaign2.created_on), + }, + { + "uuid": str(campaign1.uuid), + "name": "Reminders #1", + "archived": False, + "group": {"uuid": reporters.uuid, "name": "Reporters"}, + "created_on": format_datetime(campaign1.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={campaign1.uuid}", [self.editor], results=[campaign1]) + + # try to create empty campaign + self.assertPost( + endpoint_url, + self.editor, + {}, + errors={"name": "This field is required.", "group": "This field is required."}, + ) + + # create new campaign + response = self.assertPost( + endpoint_url, self.editor, {"name": "Reminders #3", "group": reporters.uuid}, status=201 + ) + + campaign3 = Campaign.objects.get(name="Reminders #3") + self.assertEqual( + response.json(), + { + "uuid": str(campaign3.uuid), + "name": "Reminders #3", + "archived": False, + "group": {"uuid": reporters.uuid, "name": "Reporters"}, + "created_on": format_datetime(campaign3.created_on), + }, + ) + + # try to create another campaign with same name + self.assertPost( + endpoint_url, + self.editor, + {"name": "Reminders #3", "group": reporters.uuid}, + errors={"name": "This field must be unique."}, + ) + + # it's fine if a campaign in another org has that name + self.assertPost(endpoint_url, self.editor, {"name": "Spam", "group": reporters.uuid}, status=201) + + # try to create a campaign with name that's too long + self.assertPost( + endpoint_url, + self.editor, + {"name": "x" * 65, "group": reporters.uuid}, + errors={"name": "Ensure this field has no more than 64 characters."}, + ) + + # update campaign by UUID + self.assertPost( + endpoint_url + f"?uuid={campaign3.uuid}", self.editor, {"name": "Reminders III", "group": other_group.uuid} + ) + + campaign3.refresh_from_db() + self.assertEqual(campaign3.name, "Reminders III") + self.assertEqual(campaign3.group, other_group) + + # can't update campaign in other org + self.assertPost( + endpoint_url + f"?uuid={spam.uuid}", self.editor, {"name": "Won't work", "group": spammers.uuid}, status=404 + ) + + # 
can't update deleted campaign + campaign1.is_active = False + campaign1.save(update_fields=("is_active",)) + + self.assertPost( + endpoint_url + f"?uuid={campaign1.uuid}", + self.editor, + {"name": "Won't work", "group": spammers.uuid}, + status=404, + ) + + # can't update archived campaign either + campaign1.is_active = True + campaign1.is_archived = True + campaign1.save(update_fields=("is_active", "is_archived")) + + self.assertPost( + endpoint_url + f"?uuid={campaign1.uuid}", + self.editor, + {"name": "Won't work", "group": spammers.uuid}, + status=404, + ) diff --git a/temba/api/v2/tests/test_channel_events.py b/temba/api/v2/tests/test_channel_events.py new file mode 100644 index 00000000000..ef5316772f8 --- /dev/null +++ b/temba/api/v2/tests/test_channel_events.py @@ -0,0 +1,64 @@ +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.channels.models import ChannelEvent + +from . import APITest + + +class ChannelEventsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.channel_events") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + joe = self.create_contact("Joe Blow", phone="+250788123123") + call1 = self.create_channel_event(self.channel, "tel:+250788123123", ChannelEvent.TYPE_CALL_IN_MISSED) + call2 = self.create_channel_event( + self.channel, "tel:+250788124124", ChannelEvent.TYPE_CALL_IN, extra=dict(duration=36) + ) + call3 = self.create_channel_event(self.channel, "tel:+250788124124", ChannelEvent.TYPE_CALL_OUT_MISSED) + call4 = self.create_channel_event( + self.channel, "tel:+250788123123", ChannelEvent.TYPE_CALL_OUT, extra=dict(duration=15) + ) + + # no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[call4, call3, call2, call1], + num_queries=self.BASE_SESSION_QUERIES + 3, + ) + + resp_json = response.json() + self.assertEqual( + resp_json["results"][0], + { + "id": call4.pk, + "channel": {"uuid": self.channel.uuid, "name": "Test Channel"}, + "type": "call-out", + "contact": {"uuid": joe.uuid, "name": joe.name}, + "occurred_on": format_datetime(call4.occurred_on), + "extra": dict(duration=15), + "created_on": format_datetime(call4.created_on), + }, + ) + + # filter by id + self.assertGet(endpoint_url + f"?id={call1.id}", [self.editor], results=[call1]) + + # filter by contact + self.assertGet(endpoint_url + f"?contact={joe.uuid}", [self.editor], results=[call4, call1]) + + # filter by invalid contact + self.assertGet(endpoint_url + "?contact=invalid", [self.editor], results=[]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(call3.created_on)}", [self.editor], results=[call3, call2, call1] + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(call2.created_on)}", [self.editor], results=[call4, call3, call2] + ) diff --git a/temba/api/v2/tests/test_channels.py b/temba/api/v2/tests/test_channels.py new file mode 100644 index 00000000000..6cb1bb54048 --- /dev/null +++ b/temba/api/v2/tests/test_channels.py @@ -0,0 +1,62 @@ +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime + +from . 
import APITest + + +class ChannelEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.channels") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + facebook = self.create_channel("FBA", "Facebook Channel", "billy_bob") + + # create deleted channel + deleted = self.create_channel("JC", "Deleted", "nyaruka") + deleted.release(self.admin) + + # create channel for other org + self.create_channel("FBA", "Facebook Channel", "nyaruka", org=self.org2) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[ + { + "uuid": str(facebook.uuid), + "name": "Facebook Channel", + "address": "billy_bob", + "country": None, + "device": None, + "last_seen": None, + "created_on": format_datetime(facebook.created_on), + }, + { + "uuid": str(self.channel.uuid), + "name": "Test Channel", + "address": "+250785551212", + "country": "RW", + "device": { + "name": "Nexus 5X", + "network_type": None, + "power_level": -1, + "power_source": None, + "power_status": None, + }, + "last_seen": format_datetime(self.channel.last_seen), + "created_on": format_datetime(self.channel.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={facebook.uuid}", [self.admin], results=[facebook]) + + # filter by address + self.assertGet(endpoint_url + "?address=billy_bob", [self.admin], results=[facebook]) diff --git a/temba/api/v2/tests/test_classifiers.py b/temba/api/v2/tests/test_classifiers.py new file mode 100644 index 00000000000..b0c19184142 --- /dev/null +++ b/temba/api/v2/tests/test_classifiers.py @@ -0,0 +1,53 @@ +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.classifiers.models import Classifier +from temba.classifiers.types.luis import LuisType +from temba.classifiers.types.wit import WitType + +from . 
import APITest + + +class ClassifiersEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.classifiers") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + # create some classifiers + c1 = Classifier.create(self.org, self.admin, WitType.slug, "Booker", {}) + c1.intents.create(name="book_flight", external_id="book_flight", created_on=timezone.now(), is_active=True) + c1.intents.create(name="book_hotel", external_id="book_hotel", created_on=timezone.now(), is_active=False) + c1.intents.create(name="book_car", external_id="book_car", created_on=timezone.now(), is_active=True) + + c2 = Classifier.create(self.org, self.admin, WitType.slug, "Old Booker", {}) + c2.is_active = False + c2.save() + + # on another org + Classifier.create(self.org2, self.admin, LuisType.slug, "Org2 Booker", {}) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[ + { + "name": "Booker", + "type": "wit", + "uuid": str(c1.uuid), + "intents": ["book_car", "book_flight"], + "created_on": format_datetime(c1.created_on), + } + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by uuid (not there) + self.assertGet(endpoint_url + "?uuid=09d23a05-47fe-11e4-bfe9-b8f6b119e9ab", [self.editor], results=[]) + + # filter by uuid present + self.assertGet(endpoint_url + f"?uuid={c1.uuid}", [self.user, self.editor, self.admin], results=[c1]) diff --git a/temba/api/v2/tests/test_contact_actions.py b/temba/api/v2/tests/test_contact_actions.py new file mode 100644 index 00000000000..ae73325d382 --- /dev/null +++ b/temba/api/v2/tests/test_contact_actions.py @@ -0,0 +1,232 @@ +from unittest.mock import patch + +from django.urls import reverse + +from temba.contacts.models import Contact +from temba.msgs.models import Msg +from temba.tests import mock_mailroom +from temba.tests.engine import MockSessionWriter + +from . 
import APITest + + +class ContactActionsEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.contact_actions") + ".json" + + self.assertGetNotAllowed(endpoint_url) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + for contact in Contact.objects.all(): + contact.release(self.admin) + contact.delete() + + # create some contacts to act on + contact1 = self.create_contact("Ann", phone="+250788000001") + contact2 = self.create_contact("Bob", phone="+250788000002") + contact3 = self.create_contact("Cat", phone="+250788000003") + contact4 = self.create_contact("Don", phone="+250788000004") # a blocked contact + contact5 = self.create_contact("Eve", phone="+250788000005") # a deleted contact + contact4.block(self.user) + contact5.release(self.user) + + group = self.create_group("Testers") + self.create_field("isdeveloper", "Is developer") + self.create_group("Developers", query="isdeveloper = YES") + other_org_group = self.create_group("Testers", org=self.org2) + + # create some waiting runs for some of the contacts + flow = self.create_flow("Favorites") + MockSessionWriter(contact1, flow).wait().save() + MockSessionWriter(contact2, flow).wait().save() + MockSessionWriter(contact3, flow).wait().save() + + self.create_incoming_msg(contact1, "Hello") + self.create_incoming_msg(contact2, "Hello") + self.create_incoming_msg(contact3, "Hello") + self.create_incoming_msg(contact4, "Hello") + + # try adding more contacts to group than this endpoint is allowed to operate on at one time + self.assertPost( + endpoint_url, + self.agent, + {"contacts": [str(x) for x in range(101)], "action": "add", "group": "Testers"}, + errors={"contacts": "Ensure this field has no more than 100 elements."}, + ) + + # try adding all contacts to a group by its name + self.assertPost( + endpoint_url, + self.editor, + { + "contacts": [contact1.uuid, "tel:+250788000002", contact3.uuid, contact4.uuid, contact5.uuid], + "action": "add", + "group": "Testers", + }, + errors={"contacts": "No such object: %s" % contact5.uuid}, + ) + + # try adding a blocked contact to a group + self.assertPost( + endpoint_url, + self.admin, + { + "contacts": [contact1.uuid, contact2.uuid, contact3.uuid, contact4.uuid], + "action": "add", + "group": "Testers", + }, + errors={"non_field_errors": "Non-active contacts cannot be added to groups: %s" % contact4.uuid}, + ) + + # add valid contacts to the group by name + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid, "tel:+250788000002"], "action": "add", "group": "Testers"}, + status=204, + ) + self.assertEqual(set(group.contacts.all()), {contact1, contact2}) + + # try to add to a non-existent group + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid], "action": "add", "group": "Spammers"}, + errors={"group": "No such object: Spammers"}, + ) + + # try to add to a dynamic group + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid], "action": "add", "group": "Developers"}, + errors={"group": "Contact group must not be query based: Developers"}, + ) + + # add contact 3 to a group by its UUID + self.assertPost( + endpoint_url, self.admin, {"contacts": [contact3.uuid], "action": "add", "group": group.uuid}, status=204 + ) + self.assertEqual(set(group.contacts.all()), {contact1, contact2, contact3}) + + # try adding with invalid group UUID + self.assertPost( + endpoint_url, + self.admin, + {"contacts": 
[contact3.uuid], "action": "add", "group": "15611256-95b5-46d5-b857-abafe0d32fe9"}, + errors={"group": "No such object: 15611256-95b5-46d5-b857-abafe0d32fe9"}, + ) + + # try to add to a group in another org + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact3.uuid], "action": "add", "group": other_org_group.uuid}, + errors={"group": f"No such object: {other_org_group.uuid}"}, + ) + + # remove contact 2 from group by its name (which is case-insensitive) + self.assertPost( + endpoint_url, self.admin, {"contacts": [contact2.uuid], "action": "remove", "group": "testers"}, status=204 + ) + self.assertEqual(set(group.contacts.all()), {contact1, contact3}) + + # and remove contact 3 from group by its UUID + self.assertPost( + endpoint_url, self.admin, {"contacts": [contact3.uuid], "action": "remove", "group": group.uuid}, status=204 + ) + self.assertEqual(set(group.contacts.all()), {contact1}) + + # try to add to group without specifying a group + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid], "action": "add"}, + errors={"non_field_errors": 'For action "add" you should also specify a group'}, + ) + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid], "action": "add", "group": ""}, + errors={"group": "This field may not be null."}, + ) + + # block all contacts + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid, contact2.uuid, contact3.uuid, contact4.uuid], "action": "block"}, + status=204, + ) + self.assertEqual( + set(Contact.objects.filter(status=Contact.STATUS_BLOCKED)), {contact1, contact2, contact3, contact4} + ) + + # unblock contact 1 + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid], "action": "unblock"}, + status=204, + ) + self.assertEqual(set(self.org.contacts.filter(status=Contact.STATUS_ACTIVE)), {contact1, contact5}) + self.assertEqual(set(self.org.contacts.filter(status=Contact.STATUS_BLOCKED)), {contact2, contact3, contact4}) + + # interrupt any active runs of contacts 1 and 2 + with patch("temba.mailroom.queue_interrupt") as mock_queue_interrupt: + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid, contact2.uuid], "action": "interrupt"}, + status=204, + ) + + mock_queue_interrupt.assert_called_once_with(self.org, contacts=[contact1, contact2]) + + # archive all messages for contacts 1 and 2 + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid, contact2.uuid], "action": "archive_messages"}, + status=204, + ) + self.assertFalse(Msg.objects.filter(contact__in=[contact1, contact2], direction="I", visibility="V").exists()) + self.assertTrue(Msg.objects.filter(contact=contact3, direction="I", visibility="V").exists()) + + # delete contacts 1 and 2 + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact1.uuid, contact2.uuid], "action": "delete"}, + status=204, + ) + self.assertEqual(set(self.org.contacts.filter(is_active=False)), {contact1, contact2, contact5}) + self.assertEqual(set(self.org.contacts.filter(is_active=True)), {contact3, contact4}) + self.assertFalse(Msg.objects.filter(contact__in=[contact1, contact2]).exclude(visibility="D").exists()) + self.assertTrue(Msg.objects.filter(contact=contact3).exclude(visibility="D").exists()) + + # try to provide a group for a non-group action + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact3.uuid], "action": "block", "group": "Testers"}, + errors={"non_field_errors": 'For action "block" you should not specify a 
group'}, + ) + + # trying to act on zero contacts is an error + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [], "action": "block"}, + errors={"contacts": "Contacts can't be empty."}, + ) + + # try to invoke an invalid action + self.assertPost( + endpoint_url, + self.admin, + {"contacts": [contact3.uuid], "action": "like"}, + errors={"action": '"like" is not a valid choice.'}, + ) diff --git a/temba/api/v2/tests/test_contacts.py b/temba/api/v2/tests/test_contacts.py new file mode 100644 index 00000000000..c830f7b77b7 --- /dev/null +++ b/temba/api/v2/tests/test_contacts.py @@ -0,0 +1,782 @@ +from datetime import datetime, timezone as tzone +from urllib.parse import quote_plus + +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.contacts.models import Contact, ContactField, ContactGroup +from temba.tests import mock_mailroom + +from . import APITest + + +class ContactsEndpointTest(APITest): + def setUp(self): + super().setUp() + + self.joe = self.create_contact("Joe Blow", phone="+250788123123") + self.frank = self.create_contact("Frank", urns=["facebook:123456"]) + + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.contacts") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) + + # create some more contacts (in addition to Joe and Frank) + contact1 = self.create_contact( + "Ann", phone="0788000001", language="fra", fields={"nickname": "Annie", "gender": "female"} + ) + contact2 = self.create_contact("Bob", phone="0788000002") + contact3 = self.create_contact("Cat", phone="0788000003") + contact4 = self.create_contact( + "Don", phone="0788000004", language="fra", fields={"nickname": "Donnie", "gender": "male"} + ) + + contact1.stop(self.user) + contact2.block(self.user) + contact3.release(self.user) + + # put some contacts in a group + group = self.create_group("Customers", contacts=[self.joe, contact4]) + other_org_group = self.create_group("Nerds", org=self.org2) + + # tweak modified_on so we get the order we want + self.joe.modified_on = timezone.now() + self.joe.save(update_fields=("modified_on",)) + + survey = self.create_flow("Survey") + contact4.modified_on = timezone.now() + contact4.last_seen_on = datetime(2020, 8, 12, 13, 30, 45, 123456, tzone.utc) + contact4.current_flow = survey + contact4.save(update_fields=("modified_on", "last_seen_on", "current_flow")) + + contact1.refresh_from_db() + contact4.refresh_from_db() + self.joe.refresh_from_db() + + # create contact for other org + hans = self.create_contact("Hans", phone="0788000004", org=self.org2) + + # no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin, self.agent], + results=[contact4, self.joe, contact2, contact1, self.frank], + num_queries=self.BASE_SESSION_QUERIES + 7, + ) + self.assertEqual( + { + "uuid": contact4.uuid, + "name": "Don", + "status": "active", + "language": "fra", + "urns": ["tel:+250788000004"], + "groups": [{"uuid": group.uuid, "name": group.name}], + "notes": [], + "fields": {"nickname": "Donnie", "gender": "male"}, + "flow": {"uuid": str(survey.uuid), "name": "Survey"}, + "created_on": format_datetime(contact4.created_on), + "modified_on": format_datetime(contact4.modified_on), + "last_seen_on": "2020-08-12T13:30:45.123456Z", + "blocked": False, + "stopped": False, + }, + 
response.json()["results"][0], + ) + + # no filtering with token auth + response = self.assertGet( + endpoint_url, + [self.admin], + results=[contact4, self.joe, contact2, contact1, self.frank], + by_token=True, + num_queries=self.BASE_TOKEN_QUERIES + 7, + ) + + # with expanded URNs + response = self.assertGet( + endpoint_url + "?expand_urns=true", + [self.user], + results=[contact4, self.joe, contact2, contact1, self.frank], + ) + self.assertEqual( + { + "uuid": contact4.uuid, + "name": "Don", + "status": "active", + "language": "fra", + "urns": [ + { + "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"}, + "scheme": "tel", + "path": "+250788000004", + "display": None, + } + ], + "groups": [{"uuid": group.uuid, "name": group.name}], + "notes": [], + "fields": {"nickname": "Donnie", "gender": "male"}, + "flow": {"uuid": str(survey.uuid), "name": "Survey"}, + "created_on": format_datetime(contact4.created_on), + "modified_on": format_datetime(contact4.modified_on), + "last_seen_on": "2020-08-12T13:30:45.123456Z", + "blocked": False, + "stopped": False, + }, + response.json()["results"][0], + ) + + # reversed + response = self.assertGet( + endpoint_url + "?reverse=true", + [self.user], + results=[self.frank, contact1, contact2, self.joe, contact4], + ) + + with self.anonymous(self.org): + response = self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin, self.agent], + results=[contact4, self.joe, contact2, contact1, self.frank], + num_queries=self.BASE_SESSION_QUERIES + 7, + ) + self.assertEqual( + { + "uuid": contact4.uuid, + "name": "Don", + "anon_display": f"{contact4.id:010}", + "status": "active", + "language": "fra", + "urns": ["tel:********"], + "groups": [{"uuid": group.uuid, "name": group.name}], + "notes": [], + "fields": {"nickname": "Donnie", "gender": "male"}, + "flow": {"uuid": str(survey.uuid), "name": "Survey"}, + "created_on": format_datetime(contact4.created_on), + "modified_on": format_datetime(contact4.modified_on), + "last_seen_on": "2020-08-12T13:30:45.123456Z", + "blocked": False, + "stopped": False, + }, + response.json()["results"][0], + ) + + # with expanded URNs + response = self.assertGet( + endpoint_url + "?expand_urns=true", + [self.user], + results=[contact4, self.joe, contact2, contact1, self.frank], + ) + self.assertEqual( + { + "uuid": contact4.uuid, + "name": "Don", + "anon_display": f"{contact4.id:010}", + "status": "active", + "language": "fra", + "urns": [ + { + "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"}, + "scheme": "tel", + "path": "********", + "display": None, + } + ], + "groups": [{"uuid": group.uuid, "name": group.name}], + "notes": [], + "fields": {"nickname": "Donnie", "gender": "male"}, + "flow": {"uuid": str(survey.uuid), "name": "Survey"}, + "created_on": format_datetime(contact4.created_on), + "modified_on": format_datetime(contact4.modified_on), + "last_seen_on": "2020-08-12T13:30:45.123456Z", + "blocked": False, + "stopped": False, + }, + response.json()["results"][0], + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={contact2.uuid}", [self.editor], results=[contact2]) + + # filter by URN (which should be normalized) + self.assertGet(endpoint_url + f"?urn={quote_plus('tel:078-8000004')}", [self.editor], results=[contact4]) + + # error if URN can't be parsed + self.assertGet(endpoint_url + "?urn=12345", [self.editor], errors={None: "Invalid URN: 12345"}) + + # filter by group UUID / name + self.assertGet(endpoint_url + f"?group={group.uuid}", [self.editor], 
results=[contact4, self.joe]) + self.assertGet(endpoint_url + "?group=Customers", [self.editor], results=[contact4, self.joe]) + + # filter by invalid group + self.assertGet(endpoint_url + "?group=invalid", [self.editor], results=[]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(contact1.modified_on)}", + [self.editor], + results=[contact1, self.frank], + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(self.joe.modified_on)}", + [self.editor], + results=[contact4, self.joe], + ) + + # view the deleted contact + self.assertGet( + endpoint_url + "?deleted=true", + [self.editor], + results=[ + { + "uuid": contact3.uuid, + "name": None, + "status": None, + "language": None, + "urns": [], + "groups": [], + "notes": [], + "fields": {}, + "flow": None, + "created_on": format_datetime(contact3.created_on), + "modified_on": format_datetime(contact3.modified_on), + "last_seen_on": None, + "blocked": None, + "stopped": None, + } + ], + ) + + # try to post something other than an object + self.assertPost( + endpoint_url, self.editor, [], errors={"non_field_errors": "Request body should be a single JSON object"} + ) + + # create an empty contact + response = self.assertPost(endpoint_url, self.editor, {}, status=201) + + empty = Contact.objects.get(name=None, is_active=True) + self.assertEqual( + { + "uuid": empty.uuid, + "name": None, + "status": "active", + "language": None, + "urns": [], + "groups": [], + "notes": [], + "fields": {"nickname": None, "gender": None}, + "flow": None, + "created_on": format_datetime(empty.created_on), + "modified_on": format_datetime(empty.modified_on), + "last_seen_on": None, + "blocked": False, + "stopped": False, + }, + response.json(), + ) + + # create with all fields but empty + response = self.assertPost( + endpoint_url, + self.editor, + {"name": None, "language": None, "urns": [], "groups": [], "fields": {}}, + status=201, + ) + + jaqen = Contact.objects.order_by("id").last() + self.assertIsNone(jaqen.name) + self.assertIsNone(jaqen.language) + self.assertEqual(Contact.STATUS_ACTIVE, jaqen.status) + self.assertEqual(set(), set(jaqen.urns.all())) + self.assertEqual(set(), set(jaqen.get_groups())) + self.assertIsNone(jaqen.fields) + + # create with all fields + self.assertPost( + endpoint_url, + self.editor, + { + "name": "Jean", + "language": "fra", + "urns": ["tel:+250783333333", "twitter:JEAN"], + "groups": [group.uuid], + "fields": {"nickname": "Jado"}, + }, + status=201, + ) + + # URNs will be normalized + nickname = self.org.fields.get(key="nickname") + gender = self.org.fields.get(key="gender") + jean = Contact.objects.filter(name="Jean", language="fra").order_by("-pk").first() + self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250783333333", "twitter:jean"}) + self.assertEqual(set(jean.get_groups()), {group}) + self.assertEqual(jean.get_field_value(nickname), "Jado") + + # try to create with group from other org + self.assertPost( + endpoint_url, + self.editor, + {"name": "Jim", "groups": [other_org_group.uuid]}, + errors={"groups": f"No such object: {other_org_group.uuid}"}, + ) + + # try to create with invalid fields + response = self.assertPost( + endpoint_url, + self.editor, + { + "name": "Jim", + "language": "xyz", + "urns": ["1234556789"], + "groups": ["59686b4e-14bc-4160-9376-b649b218c806"], + "fields": {"hmmm": "X"}, + }, + errors={ + "language": "Not a valid ISO639-3 language code.", + "groups": "No such object: 59686b4e-14bc-4160-9376-b649b218c806", + "fields": 
"Invalid contact field key: hmmm", + ("urns", "0"): "Invalid URN: 1234556789. Ensure phone numbers contain country codes.", + }, + ) + + # update an existing contact by UUID but don't provide any fields + self.assertPost(endpoint_url + f"?uuid={jean.uuid}", self.editor, {}) + + # contact should be unchanged + jean.refresh_from_db() + self.assertEqual(jean.name, "Jean") + self.assertEqual(jean.language, "fra") + self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250783333333", "twitter:jean"}) + self.assertEqual(set(jean.get_groups()), {group}) + self.assertEqual(jean.get_field_value(nickname), "Jado") + + # update by UUID and change all fields + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + { + "name": "Jason Undead", + "language": "ita", + "urns": ["tel:+250784444444"], + "groups": [], + "fields": {"nickname": "Žan", "gender": "frog"}, + }, + ) + + jean.refresh_from_db() + self.assertEqual(jean.name, "Jason Undead") + self.assertEqual(jean.language, "ita") + self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250784444444"}) + self.assertEqual(set(jean.get_groups()), set()) + self.assertEqual(jean.get_field_value(nickname), "Žan") + self.assertEqual(jean.get_field_value(gender), "frog") + + # change the language field + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"name": "Jean II", "language": "eng", "urns": ["tel:+250784444444"], "groups": [], "fields": {}}, + ) + + jean.refresh_from_db() + self.assertEqual(jean.name, "Jean II") + self.assertEqual(jean.language, "eng") + self.assertEqual(set(jean.urns.values_list("identity", flat=True)), {"tel:+250784444444"}) + self.assertEqual(set(jean.get_groups()), set()) + self.assertEqual(jean.get_field_value(nickname), "Žan") + + # update by uuid and remove all fields + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + { + "name": "Jean II", + "language": "eng", + "urns": ["tel:+250784444444"], + "groups": [], + "fields": {"nickname": "", "gender": ""}, + }, + ) + + jean.refresh_from_db() + self.assertEqual(jean.get_field_value(nickname), None) + self.assertEqual(jean.get_field_value(gender), None) + + # update by uuid and update/remove fields + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + { + "name": "Jean II", + "language": "eng", + "urns": ["tel:+250784444444"], + "groups": [], + "fields": {"nickname": "Jado", "gender": ""}, + }, + ) + + jean.refresh_from_db() + self.assertEqual(jean.get_field_value(nickname), "Jado") + self.assertEqual(jean.get_field_value(gender), None) + + # update by URN (which should be normalized) + self.assertPost(endpoint_url + f"?urn={quote_plus('tel:+250-78-4444444')}", self.editor, {"name": "Jean III"}) + + jean.refresh_from_db() + self.assertEqual(jean.name, "Jean III") + + # try to specify URNs field whilst referencing by URN + self.assertPost( + endpoint_url + f"?urn={quote_plus('tel:+250-78-4444444')}", + self.editor, + {"urns": ["tel:+250785555555"]}, + errors={"urns": "Field not allowed when using URN in URL"}, + ) + + # if contact doesn't exist with URN, they're created + self.assertPost( + endpoint_url + f"?urn={quote_plus('tel:+250-78-5555555')}", self.editor, {"name": "Bobby"}, status=201 + ) + + # URN should be normalized + bobby = Contact.objects.get(name="Bobby") + self.assertEqual(set(bobby.urns.values_list("identity", flat=True)), {"tel:+250785555555"}) + + # try to create a contact with a URN belonging to another contact + self.assertPost( + 
endpoint_url, + self.editor, + {"name": "Robert", "urns": ["tel:+250-78-5555555"]}, + errors={("urns", "0"): "URN is in use by another contact."}, + ) + + # try to update a contact with non-existent UUID + self.assertPost(endpoint_url + "?uuid=ad6acad9-959b-4d70-b144-5de2891e4d00", self.editor, {}, status=404) + + # try to update a contact in another org + self.assertPost(endpoint_url + f"?uuid={hans.uuid}", self.editor, {}, status=404) + + # try to add a contact to a dynamic group + dyn_group = self.create_group("Dynamic Group", query="name = Frank") + ContactGroup.objects.filter(id=dyn_group.id).update(status=ContactGroup.STATUS_READY) + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"groups": [dyn_group.uuid]}, + errors={"groups": "Contact group must not be query based: %s" % dyn_group.uuid}, + ) + + # try to give a contact more than 100 URNs + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"urns": ["twitter:bob%d" % u for u in range(101)]}, + errors={"urns": "Ensure this field has no more than 100 elements."}, + ) + + # try to give a contact more than 100 contact fields + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"fields": {"field_%d" % f: f for f in range(101)}}, + errors={"fields": "Ensure this field has no more than 100 elements."}, + ) + + # ok to give them 100 URNs + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"urns": ["twitter:bob%d" % u for u in range(100)]}, + ) + self.assertEqual(jean.urns.count(), 100) + + # try to move a blocked contact into a group + jean.block(self.user) + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"groups": [group.uuid]}, + errors={"groups": "Non-active contacts can't be added to groups"}, + ) + + # try to update a contact by both UUID and URN + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}&urn={quote_plus('tel:+250784444444')}", + self.editor, + {}, + errors={None: "URL can only contain one of the following parameters: urn, uuid"}, + ) + + # try an empty delete request + self.assertDelete( + endpoint_url, + self.editor, + errors={None: "URL must contain one of the following parameters: urn, uuid"}, + ) + + # delete a contact by UUID + self.assertDelete(endpoint_url + f"?uuid={jean.uuid}", self.editor, status=204) + + jean.refresh_from_db() + self.assertFalse(jean.is_active) + + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.admin, + {}, + errors={"non_field_errors": "Deleted contacts can't be modified."}, + ) + + # create xavier + self.assertPost( + endpoint_url, self.admin, {"name": "Xavier", "urns": ["tel:+250-78-7777777", "twitter:XAVIER"]}, status=201 + ) + + xavier = Contact.objects.get(name="Xavier") + self.assertEqual(set(xavier.urns.values_list("identity", flat=True)), {"twitter:xavier", "tel:+250787777777"}) + + # updating fields by URN should keep all existing URNs + self.assertPost( + endpoint_url + f"?urn={quote_plus('tel:+250787777777')}", self.admin, {"fields": {"gender": "Male"}} + ) + + xavier.refresh_from_db() + self.assertEqual(set(xavier.urns.values_list("identity", flat=True)), {"twitter:xavier", "tel:+250787777777"}) + self.assertEqual(xavier.get_field_value(gender), "Male") + + # delete a contact by URN (which should be normalized) + self.assertDelete(endpoint_url + f"?urn={quote_plus('twitter:XAVIER')}", self.editor, status=204) + + xavier.refresh_from_db() + self.assertFalse(xavier.is_active) + + # try deleting a contact by a non-existent URN + 
self.assertDelete(endpoint_url + "?urn=twitter:billy", self.editor, status=404) + + # try to delete a contact in another org + self.assertDelete(endpoint_url + f"?uuid={hans.uuid}", self.editor, status=404) + + # add some notes for frank + frank_url = endpoint_url + f"?uuid={self.frank.uuid}" + for i in range(1, 6): + self.assertPost( + frank_url, + self.admin, + {"note": f"Frank is a good guy ({i})"}, + ) + + # four more notes by another user to make sure prefetch works + for i in range(6, 10): + self.assertPost( + frank_url, + self.editor, + {"note": f"Frank is an okay guy ({i})"}, + ) + + self.frank.refresh_from_db() + response = self.assertGet( + frank_url, [self.editor], results=[self.frank], num_queries=self.BASE_SESSION_QUERIES + 7 + ) + + # our oldest note should be number 5 + self.assertEqual( + "Frank is a good guy (5)", + response.json()["results"][0]["notes"][0]["text"], + ) + + # our newest note should be number 9 + self.assertEqual( + "Frank is an okay guy (9)", + response.json()["results"][0]["notes"][-1]["text"], + ) + + @mock_mailroom + def test_as_agent(self, mr_mocks): + endpoint_url = reverse("api.v2.contacts") + ".json" + + self.create_field("gender", "Gender", ContactField.TYPE_TEXT, agent_access=ContactField.ACCESS_NONE) + self.create_field("age", "Age", ContactField.TYPE_NUMBER, agent_access=ContactField.ACCESS_VIEW) + self.create_field("height", "Height", ContactField.TYPE_NUMBER, agent_access=ContactField.ACCESS_EDIT) + + contact = self.create_contact( + "Bob", urns=["telegram:12345"], fields={"gender": "M", "age": "40", "height": "180"} + ) + + # fetching a contact returns only the fields that agents can access + self.assertGet( + endpoint_url + f"?uuid={contact.uuid}", + [self.agent], + results=[ + { + "uuid": str(contact.uuid), + "name": "Bob", + "status": "active", + "language": None, + "urns": ["telegram:12345"], + "groups": [], + "notes": [], + "fields": {"age": "40", "height": "180"}, + "flow": None, + "created_on": format_datetime(contact.created_on), + "modified_on": format_datetime(contact.modified_on), + "last_seen_on": None, + "blocked": False, + "stopped": False, + } + ], + ) + + # can't edit the field that we don't have any access to + self.assertPost( + endpoint_url + f"?uuid={contact.uuid}", + self.agent, + {"fields": {"gender": "M"}}, + errors={"fields": "Invalid contact field key: gender"}, + ) + + # nor the field that we have view access to + self.assertPost( + endpoint_url + f"?uuid={contact.uuid}", + self.agent, + {"fields": {"age": "30"}}, + errors={"fields": "Editing of 'age' values disallowed for current user."}, + ) + + # but can edit the field we have edit access for + self.assertPost( + endpoint_url + f"?uuid={contact.uuid}", + self.agent, + {"fields": {"height": "160"}}, + ) + + def test_prevent_null_chars(self): + endpoint_url = reverse("api.v2.contacts") + ".json" + + self.create_field("string_field", "String") + self.create_field("number_field", "Number", value_type=ContactField.TYPE_NUMBER) + + # test create with null chars \u0000 + self.login(self.admin) + self.assertPost( + endpoint_url, + self.editor, + { + "name": "Jean", + "urns": ["tel:+250783333334"], + "fields": {"string_field": "crayons on the wall \u0000, pudding on the wall \x00, yeah \0"}, + }, + errors={("fields", "string_field"): "Null characters are not allowed."}, + ) + + @mock_mailroom + def test_update_datetime_field(self, mr_mocks): + endpoint_url = reverse("api.v2.contacts") + ".json" + + self.create_field("activated_at", "Tag activation", 
ContactField.TYPE_DATETIME) + + # update contact with valid date format for the org - DD-MM-YYYY + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "31-12-2017"}} + ) + self.assertIsNotNone(response.json()["fields"]["activated_at"]) + + # update contact with valid ISO8601 timestamp value with timezone + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11T11:12:13Z"}} + ) + self.assertEqual(response.json()["fields"]["activated_at"], "2017-11-11T13:12:13+02:00") + + # update contact with valid ISO8601 timestamp value, 'T' replaced with space + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11 11:12:13Z"}} + ) + self.assertEqual(response.json()["fields"]["activated_at"], "2017-11-11T13:12:13+02:00") + + # update contact with invalid ISO8601 timestamp value without timezone + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "2017-11-11T11:12:13"}} + ) + self.assertIsNone(response.json()["fields"]["activated_at"]) + + # update contact with invalid date format for the org - MM-DD-YYYY + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "12-31-2017"}} + ) + self.assertIsNone(response.json()["fields"]["activated_at"]) + + # update contact with invalid timestamp value + response = self.assertPost( + endpoint_url + f"?uuid={self.joe.uuid}", self.editor, {"fields": {"activated_at": "el123a41"}} + ) + self.assertIsNone(response.json()["fields"]["activated_at"]) + + @mock_mailroom + def test_anonymous_org(self, mr_mocks): + endpoint_url = reverse("api.v2.contacts") + ".json" + + group = ContactGroup.get_or_create(self.org, self.admin, "Customers") + + self.assertPost( + endpoint_url, + self.editor, + { + "name": "Jean", + "language": "fra", + "urns": ["tel:+250783333333", "twitter:JEAN"], + "groups": [group.uuid], + "fields": {}, + }, + status=201, + ) + + jean = Contact.objects.filter(name="Jean", language="fra").get() + + with self.anonymous(self.org): + # can't update via URN + self.assertPost( + endpoint_url + "?urn=tel:+250785555555", + self.editor, + {}, + errors={None: "URN lookups not allowed for anonymous organizations"}, + status=400, + ) + + # can't update contact URNs + self.assertPost( + endpoint_url + f"?uuid={jean.uuid}", + self.editor, + {"urns": ["tel:+250786666666"]}, + errors={"urns": "Updating URNs not allowed for anonymous organizations"}, + status=400, + ) + + # output shouldn't include URNs + response = self.assertGet(endpoint_url + f"?uuid={jean.uuid}", [self.admin], results=[jean]) + self.assertEqual(response.json()["results"][0]["urns"], ["tel:********", "twitter:********"]) + + # but can create with URNs + response = self.assertPost( + endpoint_url, + self.admin, + {"name": "Xavier", "urns": ["tel:+250-78-7777777", "twitter:XAVIER"]}, + status=201, + ) + + # TODO should UUID be masked in response?? 
+ xavier = Contact.objects.get(name="Xavier") + self.assertEqual( + set(xavier.urns.values_list("identity", flat=True)), {"tel:+250787777777", "twitter:xavier"} + ) + + # can't filter by URN + self.assertGet( + endpoint_url + f"?urn={quote_plus('tel:+250-78-8000004')}", + [self.admin], + errors={None: "URN lookups not allowed for anonymous organizations"}, + ) diff --git a/temba/api/v2/tests/test_definitions.py b/temba/api/v2/tests/test_definitions.py new file mode 100644 index 00000000000..f07a093ed1b --- /dev/null +++ b/temba/api/v2/tests/test_definitions.py @@ -0,0 +1,115 @@ +from django.urls import reverse + +from temba.campaigns.models import Campaign +from temba.flows.models import Flow + +from . import APITest + + +class DefinitionsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.definitions") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + self.import_file("test_flows/subflow.json") + flow = Flow.objects.get(name="Parent Flow") + + # with all flow dependencies we should also get the child flow + self.assertGet( + endpoint_url + f"?flow={flow.uuid}", + [self.editor], + raw=lambda j: {f["name"] for f in j["flows"]} == {"Child Flow", "Parent Flow"}, + ) + + # export just the parent flow + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&dependencies=none", + [self.editor], + raw=lambda j: {f["name"] for f in j["flows"]} == {"Parent Flow"}, + ) + + # import the clinic app which has campaigns + self.import_file("test_flows/the_clinic.json") + + # our catchall flow, all alone + flow = Flow.objects.get(name="Catch All") + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&dependencies=none", + [self.editor], + raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 0, + ) + + # with its trigger dependency + self.assertGet( + endpoint_url + f"?flow={flow.uuid}", + [self.editor], + raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 1, + ) + + # our registration flow, all alone + flow = Flow.objects.get(name="Register Patient") + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&dependencies=none", + [self.editor], + raw=lambda j: len(j["flows"]) == 1 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 0, + ) + + # touches a lot of stuff + self.assertGet( + endpoint_url + f"?flow={flow.uuid}", + [self.editor], + raw=lambda j: len(j["flows"]) == 6 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 2, + ) + + # ignore campaign dependencies + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&dependencies=flows", + [self.editor], + raw=lambda j: len(j["flows"]) == 2 and len(j["campaigns"]) == 0 and len(j["triggers"]) == 1, + ) + + # add our missed call flow + missed_call = Flow.objects.get(name="Missed Call") + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&flow={missed_call.uuid}&dependencies=all", + [self.editor], + raw=lambda j: len(j["flows"]) == 7 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 3, + ) + + campaign = Campaign.objects.get(name="Appointment Schedule") + self.assertGet( + endpoint_url + f"?campaign={campaign.uuid}&dependencies=none", + [self.editor], + raw=lambda j: len(j["flows"]) == 0 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 0, + ) + + self.assertGet( + endpoint_url + f"?campaign={campaign.uuid}", + [self.editor], + raw=lambda j: len(j["flows"]) == 6 and len(j["campaigns"]) == 1 and len(j["triggers"]) == 2, + ) + + # 
test an invalid value for dependencies + self.assertGet( + endpoint_url + f"?flow={flow.uuid}&dependencies=xx", + [self.editor], + errors={None: "dependencies must be one of none, flows, all"}, + ) + + # test that flows are migrated + self.import_file("test_flows/favorites_v13.json") + + flow = Flow.objects.get(name="Favorites") + self.assertGet( + endpoint_url + f"?flow={flow.uuid}", + [self.editor], + raw=lambda j: len(j["flows"]) == 1 and j["flows"][0]["spec_version"] == Flow.CURRENT_SPEC_VERSION, + ) + + # test fetching docs anonymously + self.client.logout() + response = self.client.get(reverse("api.v2.definitions")) + self.assertContains(response, "Deprecated endpoint") diff --git a/temba/api/v2/tests/test_fields.py b/temba/api/v2/tests/test_fields.py new file mode 100644 index 00000000000..e35517d50d9 --- /dev/null +++ b/temba/api/v2/tests/test_fields.py @@ -0,0 +1,159 @@ +from django.test import override_settings +from django.urls import reverse + +from temba.campaigns.models import Campaign, CampaignEvent +from temba.contacts.models import ContactField + +from . import APITest + + +class FieldsEndpointTest(APITest): + @override_settings(ORG_LIMIT_DEFAULTS={"fields": 10}) + def test_endpoint(self): + endpoint_url = reverse("api.v2.fields") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + nick_name = self.create_field("nick_name", "Nick Name", agent_access=ContactField.ACCESS_EDIT) + registered = self.create_field("registered", "Registered On", value_type=ContactField.TYPE_DATETIME) + self.create_field("not_ours", "Something Else", org=self.org2) + + # add our date field to some campaign events + campaign = Campaign.create(self.org, self.admin, "Reminders", self.create_group("Farmers")) + CampaignEvent.create_flow_event( + self.org, self.admin, campaign, registered, offset=1, unit="W", flow=self.create_flow("Event 1") + ) + CampaignEvent.create_flow_event( + self.org, self.admin, campaign, registered, offset=2, unit="W", flow=self.create_flow("Event 2") + ) + + # and some regular flows + self.create_flow("Flow 1").field_dependencies.add(registered) + self.create_flow("Flow 2").field_dependencies.add(registered) + self.create_flow("Flow 3").field_dependencies.add(registered) + + # and a group + self.create_group("Farmers").query_fields.add(registered) + + deleted = self.create_field("deleted", "Deleted") + deleted.release(self.admin) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "key": "registered", + "name": "Registered On", + "type": "datetime", + "featured": False, + "priority": 0, + "usages": {"campaign_events": 2, "flows": 3, "groups": 1}, + "agent_access": "view", + "label": "Registered On", + "value_type": "datetime", + }, + { + "key": "nick_name", + "name": "Nick Name", + "type": "text", + "featured": False, + "priority": 0, + "usages": {"campaign_events": 0, "flows": 0, "groups": 0}, + "agent_access": "edit", + "label": "Nick Name", + "value_type": "text", + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) + + # filter by key + self.assertGet(endpoint_url + "?key=nick_name", [self.editor], results=[nick_name]) + + # try to create empty field + self.assertPost(endpoint_url, self.admin, {}, errors={"non_field_errors": "Field 'name' is required."}) + + # try to create field without type + self.assertPost( + endpoint_url, self.admin, {"name": "goats"}, errors={"non_field_errors": "Field 'type' is 
required."} + ) + + # try again with some invalid values + self.assertPost( + endpoint_url, + self.admin, + {"name": "!@#$%", "type": "video"}, + errors={"name": "Can only contain letters, numbers and hypens.", "type": '"video" is not a valid choice.'}, + ) + + # try again with some invalid values using deprecated field names + self.assertPost( + endpoint_url, + self.admin, + {"label": "!@#$%", "value_type": "video"}, + errors={ + "label": "Can only contain letters, numbers and hypens.", + "value_type": '"video" is not a valid choice.', + }, + ) + + # try again with a label that would generate an invalid key + self.assertPost( + endpoint_url, + self.admin, + {"name": "HAS", "type": "text"}, + errors={"name": 'Generated key "has" is invalid or a reserved name.'}, + ) + + # try again with a label that's already taken + self.assertPost( + endpoint_url, + self.admin, + {"label": "nick name", "value_type": "text"}, + errors={"label": "This field must be unique."}, + ) + + # create a new field + self.assertPost(endpoint_url, self.editor, {"name": "Age", "type": "number"}, status=201) + + age = ContactField.objects.get( + org=self.org, name="Age", value_type="N", is_proxy=False, is_system=False, is_active=True + ) + + # update a field by its key + self.assertPost(endpoint_url + "?key=age", self.admin, {"name": "Real Age", "type": "datetime"}) + age.refresh_from_db() + self.assertEqual(age.name, "Real Age") + self.assertEqual(age.value_type, "D") + + # try to update with key of deleted field + self.assertPost(endpoint_url + "?key=deleted", self.admin, {"name": "Something", "type": "text"}, status=404) + + # try to update with non-existent key + self.assertPost(endpoint_url + "?key=not_ours", self.admin, {"name": "Something", "type": "text"}, status=404) + + # try to change type of date field used by campaign event + self.assertPost( + endpoint_url + "?key=registered", + self.admin, + {"name": "Registered", "type": "text"}, + errors={"type": "Can't change type of date field being used by campaign events."}, + ) + + CampaignEvent.objects.all().delete() + ContactField.objects.filter(is_system=False).delete() + + for i in range(10): + self.create_field("field%d" % i, "Field%d" % i) + + self.assertPost( + endpoint_url, + self.admin, + {"label": "Age", "value_type": "numeric"}, + errors={None: "Cannot create object because workspace has reached limit of 10."}, + status=409, + ) diff --git a/temba/api/v2/tests/test_flow_starts.py b/temba/api/v2/tests/test_flow_starts.py new file mode 100644 index 00000000000..31a6a7cda3f --- /dev/null +++ b/temba/api/v2/tests/test_flow_starts.py @@ -0,0 +1,295 @@ +from unittest.mock import patch + +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.flows.models import FlowStart + +from . 
import APITest + + +class FlowStartsEndpointTest(APITest): + @patch("temba.flows.models.FlowStart.async_start") + def test_endpoint(self, mock_async_start): + endpoint_url = reverse("api.v2.flow_starts") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + flow = self.create_flow("Test") + + # try to create an empty flow start + self.assertPost(endpoint_url, self.editor, {}, errors={"flow": "This field is required."}) + + # start a flow with the minimum required parameters + joe = self.create_contact("Joe Blow", phone="+250788123123") + response = self.assertPost(endpoint_url, self.editor, {"flow": flow.uuid, "contacts": [joe.uuid]}, status=201) + + start1 = flow.starts.get(id=response.json()["id"]) + self.assertEqual(start1.flow, flow) + self.assertEqual(set(start1.contacts.all()), {joe}) + self.assertEqual(set(start1.groups.all()), set()) + self.assertEqual(start1.exclusions, {"in_a_flow": False, "started_previously": False}) + self.assertEqual(start1.params, {}) + + # check we tried to start the new flow start + mock_async_start.assert_called_once() + mock_async_start.reset_mock() + + # start a flow with all parameters + hans = self.create_contact("Hans Gruber", phone="+4921551511") + hans_group = self.create_group("hans", contacts=[hans]) + response = self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "extra": {"first_name": "Ryan", "last_name": "Lewis"}, + }, + status=201, + ) + + # assert our new start + start2 = flow.starts.get(id=response.json()["id"]) + self.assertEqual(start2.flow, flow) + self.assertEqual(start2.start_type, FlowStart.TYPE_API) + self.assertEqual(["tel:+12067791212"], start2.urns) + self.assertEqual({joe}, set(start2.contacts.all())) + self.assertEqual({hans_group}, set(start2.groups.all())) + self.assertEqual(start2.exclusions, {"in_a_flow": False, "started_previously": True}) + self.assertEqual(start2.params, {"first_name": "Ryan", "last_name": "Lewis"}) + + # check we tried to start the new flow start + mock_async_start.assert_called_once() + mock_async_start.reset_mock() + + response = self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "extra": {"first_name": "Ryan", "last_name": "Lewis"}, + "params": {"first_name": "Bob", "last_name": "Marley"}, + }, + status=201, + ) + + # assert our new start + start3 = flow.starts.get(id=response.json()["id"]) + self.assertEqual(start3.flow, flow) + self.assertEqual(["tel:+12067791212"], start3.urns) + self.assertEqual({joe}, set(start3.contacts.all())) + self.assertEqual({hans_group}, set(start3.groups.all())) + self.assertEqual(start3.exclusions, {"in_a_flow": False, "started_previously": True}) + self.assertEqual(start3.params, {"first_name": "Bob", "last_name": "Marley"}) + + # check we tried to start the new flow start + mock_async_start.assert_called_once() + mock_async_start.reset_mock() + + # calls from Zapier have user-agent set to Zapier + response = self.assertPost( + endpoint_url, + self.admin, + {"contacts": [joe.uuid], "flow": flow.uuid}, + HTTP_USER_AGENT="Zapier", + status=201, + ) + + # assert our new start has start_type of Zapier + start4 = 
flow.starts.get(id=response.json()["id"]) + self.assertEqual(FlowStart.TYPE_API_ZAPIER, start4.start_type) + + # try to start a flow with no contact/group/URN + self.assertPost( + endpoint_url, + self.admin, + {"flow": flow.uuid, "restart_participants": True}, + errors={"non_field_errors": "Must specify at least one group, contact or URN"}, + ) + + # should raise validation error for invalid JSON in extra + self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "extra": "YES", + }, + errors={"extra": "Must be a valid JSON object"}, + ) + + # a list is valid JSON, but extra has to be a dict + self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "extra": [1], + }, + errors={"extra": "Must be a valid JSON object"}, + ) + + self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "params": "YES", + }, + errors={"params": "Must be a valid JSON object"}, + ) + + # a list is valid JSON, but params has to be a dict + self.assertPost( + endpoint_url, + self.admin, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": False, + "params": [1], + }, + errors={"params": "Must be a valid JSON object"}, + ) + + # invalid URN + self.assertPost( + endpoint_url, + self.admin, + {"flow": flow.uuid, "urns": ["foo:bar"], "contacts": [joe.uuid]}, + errors={("urns", "0"): "Invalid URN: foo:bar. 
Ensure phone numbers contain country codes."}, + ) + + # invalid contact uuid + self.assertPost( + endpoint_url, + self.admin, + {"flow": flow.uuid, "urns": ["tel:+12067791212"], "contacts": ["abcde"]}, + errors={"contacts": "No such object: abcde"}, + ) + + # invalid group uuid + self.assertPost( + endpoint_url, + self.admin, + {"flow": flow.uuid, "urns": ["tel:+12067791212"], "groups": ["abcde"]}, + errors={"groups": "No such object: abcde"}, + ) + + # invalid flow uuid + self.assertPost( + endpoint_url, + self.admin, + { + "flow": "abcde", + "urns": ["tel:+12067791212"], + }, + errors={"flow": "No such object: abcde"}, + ) + + # too many groups + group_uuids = [] + for g in range(101): + group_uuids.append(self.create_group("Group %d" % g).uuid) + + self.assertPost( + endpoint_url, + self.admin, + {"flow": flow.uuid, "groups": group_uuids}, + errors={"groups": "Ensure this field has no more than 100 elements."}, + ) + + # check fetching with no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[start4, start3, start2, start1], + num_queries=self.BASE_SESSION_QUERIES + 5, + ) + self.assertEqual( + response.json()["results"][1], + { + "uuid": str(start3.uuid), + "flow": {"uuid": flow.uuid, "name": "Test"}, + "contacts": [{"uuid": joe.uuid, "name": "Joe Blow"}], + "groups": [{"uuid": hans_group.uuid, "name": "hans"}], + "status": "pending", + "progress": {"total": -1, "started": 0}, + "params": {"first_name": "Bob", "last_name": "Marley"}, + "created_on": format_datetime(start3.created_on), + "modified_on": format_datetime(start3.modified_on), + # deprecated + "id": start3.id, + "extra": {"first_name": "Bob", "last_name": "Marley"}, + "restart_participants": False, + "exclude_active": False, + }, + ) + + # check filtering by UUID + self.assertGet(endpoint_url + f"?uuid={start2.uuid}", [self.admin], results=[start2]) + + # check filtering by an invalid UUID + self.assertGet(endpoint_url + "?uuid=xyz", [self.editor], errors={None: "Value for uuid must be a valid UUID"}) + + response = self.assertPost( + endpoint_url, + self.editor, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": True, + "exclude_active": False, + "extra": {"first_name": "Ryan", "last_name": "Lewis"}, + "params": {"first_name": "Bob", "last_name": "Marley"}, + }, + status=201, + ) + + start4 = flow.starts.get(id=response.json()["id"]) + self.assertEqual({"started_previously": False, "in_a_flow": False}, start4.exclusions) + + response = self.assertPost( + endpoint_url, + self.editor, + { + "urns": ["tel:+12067791212"], + "contacts": [joe.uuid], + "groups": [hans_group.uuid], + "flow": flow.uuid, + "restart_participants": True, + "exclude_active": True, + "extra": {"first_name": "Ryan", "last_name": "Lewis"}, + "params": {"first_name": "Bob", "last_name": "Marley"}, + }, + status=201, + ) + + start5 = flow.starts.get(id=response.json()["id"]) + self.assertEqual({"started_previously": False, "in_a_flow": True}, start5.exclusions) diff --git a/temba/api/v2/tests/test_flows.py b/temba/api/v2/tests/test_flows.py new file mode 100644 index 00000000000..3683ba57dba --- /dev/null +++ b/temba/api/v2/tests/test_flows.py @@ -0,0 +1,165 @@ +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.flows.models import Flow, FlowLabel, FlowRun +from temba.tests import matchers + +from . 
import APITest + + +class FlowsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.flows") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + survey = self.get_flow("media_survey") + color = self.get_flow("color") + archived = self.get_flow("favorites") + archived.archive(self.admin) + + # add a campaign message flow that should be filtered out + Flow.create_single_message(self.org, self.admin, dict(eng="Hello world"), "eng") + + # add a flow label + reporting = FlowLabel.create(self.org, self.admin, "Reporting") + color.labels.add(reporting) + + # make it look like joe completed the color flow + joe = self.create_contact("Joe Blow", phone="+250788123123") + FlowRun.objects.create( + org=self.org, flow=color, contact=joe, status=FlowRun.STATUS_COMPLETED, exited_on=timezone.now() + ) + + # flow belongs to other org + other_org = self.create_flow("Other", org=self.org2) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "uuid": archived.uuid, + "name": "Favorites", + "type": "message", + "archived": True, + "labels": [], + "expires": 720, + "runs": {"active": 0, "waiting": 0, "completed": 0, "interrupted": 0, "expired": 0, "failed": 0}, + "results": [ + { + "key": "color", + "name": "Color", + "categories": ["Red", "Green", "Blue", "Cyan", "Other"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "beer", + "name": "Beer", + "categories": ["Mutzig", "Primus", "Turbo King", "Skol", "Other"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "name", + "name": "Name", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + ], + "parent_refs": [], + "created_on": format_datetime(archived.created_on), + "modified_on": format_datetime(archived.modified_on), + }, + { + "uuid": color.uuid, + "name": "Color Flow", + "type": "message", + "archived": False, + "labels": [{"uuid": str(reporting.uuid), "name": "Reporting"}], + "expires": 10080, + "runs": {"active": 0, "waiting": 0, "completed": 1, "interrupted": 0, "expired": 0, "failed": 0}, + "results": [ + { + "key": "color", + "name": "color", + "categories": ["Orange", "Blue", "Other", "Nothing"], + "node_uuids": [matchers.UUID4String()], + } + ], + "parent_refs": [], + "created_on": format_datetime(color.created_on), + "modified_on": format_datetime(color.modified_on), + }, + { + "uuid": survey.uuid, + "name": "Media Survey", + "type": "survey", + "archived": False, + "labels": [], + "expires": 10080, + "runs": {"active": 0, "waiting": 0, "completed": 0, "interrupted": 0, "expired": 0, "failed": 0}, + "results": [ + { + "key": "name", + "name": "Name", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "photo", + "name": "Photo", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "location", + "name": "Location", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "video", + "name": "Video", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + ], + "parent_refs": [], + "created_on": format_datetime(survey.created_on), + "modified_on": format_datetime(survey.modified_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 3, + ) + + self.assertGet(endpoint_url, [self.admin2], results=[other_org]) + + # filter by UUID + self.assertGet(endpoint_url + 
f"?uuid={color.uuid}", [self.editor], results=[color]) + + # filter by type + self.assertGet(endpoint_url + "?type=message", [self.editor], results=[archived, color]) + self.assertGet(endpoint_url + "?type=survey", [self.editor], results=[survey]) + + # filter by archived + self.assertGet(endpoint_url + "?archived=1", [self.editor], results=[archived]) + self.assertGet(endpoint_url + "?archived=0", [self.editor], results=[color, survey]) + self.assertGet(endpoint_url + "?archived=false", [self.editor], results=[color, survey]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(color.modified_on)}", [self.editor], results=[color, survey] + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(color.modified_on)}", [self.editor], results=[archived, color] + ) + + # inactive flows are never returned + archived.is_active = False + archived.save() + + self.assertGet(endpoint_url, [self.editor], results=[color, survey]) diff --git a/temba/api/v2/tests/test_globals.py b/temba/api/v2/tests/test_globals.py new file mode 100644 index 00000000000..f30bd40d017 --- /dev/null +++ b/temba/api/v2/tests/test_globals.py @@ -0,0 +1,126 @@ +from django.test import override_settings +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.globals.models import Global + +from . import APITest + + +class GlobalsEndpointTest(APITest): + @override_settings(ORG_LIMIT_DEFAULTS={"globals": 3}) + def test_endpoint(self): + endpoint_url = reverse("api.v2.globals") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotAllowed(endpoint_url) + + # create some globals + deleted = Global.get_or_create(self.org, self.admin, "org_name", "Org Name", "Acme Ltd") + deleted.release(self.admin) + + global1 = Global.get_or_create(self.org, self.admin, "org_name", "Org Name", "Acme Ltd") + global2 = Global.get_or_create(self.org, self.admin, "access_token", "Access Token", "23464373") + + # on another org + global3 = Global.get_or_create(self.org2, self.admin, "thingy", "Thingy", "xyz") + + # check no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "key": "access_token", + "name": "Access Token", + "value": "23464373", + "modified_on": format_datetime(global2.modified_on), + }, + { + "key": "org_name", + "name": "Org Name", + "value": "Acme Ltd", + "modified_on": format_datetime(global1.modified_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) + + # check no filtering with token auth + response = self.assertGet( + endpoint_url, + [self.editor, self.admin], + results=[global2, global1], + by_token=True, + num_queries=self.BASE_TOKEN_QUERIES + 1, + ) + + self.assertGet(endpoint_url, [self.admin2], results=[global3]) + + # filter by key + self.assertGet(endpoint_url + "?key=org_name", [self.editor], results=[global1]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(global1.modified_on)}", [self.editor], results=[global1] + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(global1.modified_on)}", [self.editor], results=[global2, global1] + ) + + # lets change a global + self.assertPost(endpoint_url + "?key=org_name", self.admin, {"value": "Acme LLC"}) + global1.refresh_from_db() + self.assertEqual(global1.value, "Acme LLC") + + # try to create a global with no name + response = self.assertPost( + 
endpoint_url, + self.admin, + {"value": "yes"}, + errors={"non_field_errors": "Name is required when creating new global."}, + ) + + # try to create a global with invalid name + response = self.assertPost( + endpoint_url, self.admin, {"name": "!!!#$%^"}, errors={"name": "Name contains illegal characters."} + ) + + # try to create a global with name that creates an invalid key + response = self.assertPost( + endpoint_url, + self.admin, + {"name": "2cool key", "value": "23464373"}, + errors={"name": "Name creates Key that is invalid"}, + ) + + # try to create a global with name that's too long + response = self.assertPost( + endpoint_url, + self.admin, + {"name": "x" * 37}, + errors={"name": "Ensure this field has no more than 36 characters."}, + ) + + # let's create a new global + response = self.assertPost(endpoint_url, self.admin, {"name": "New Global", "value": "23464373"}, status=201) + global3 = Global.objects.get(key="new_global") + self.assertEqual( + response.json(), + { + "key": "new_global", + "name": "New Global", + "value": "23464373", + "modified_on": format_datetime(global3.modified_on), + }, + ) + + # try again now that we've hit the mocked limit of globals per org + self.assertPost( + endpoint_url, + self.admin, + {"name": "Website URL", "value": "http://example.com"}, + errors={None: "Cannot create object because workspace has reached limit of 3."}, + status=409, + ) diff --git a/temba/api/v2/tests/test_groups.py b/temba/api/v2/tests/test_groups.py new file mode 100644 index 00000000000..ccdfcae5cd4 --- /dev/null +++ b/temba/api/v2/tests/test_groups.py @@ -0,0 +1,221 @@ +from django.test import override_settings +from django.urls import reverse + +from temba.campaigns.models import Campaign +from temba.contacts.models import ContactGroup +from temba.tests import mock_mailroom +from temba.triggers.models import Trigger + +from . 
import APITest + + +class GroupsEndpointTest(APITest): + @override_settings(ORG_LIMIT_DEFAULTS={"groups": 10}) + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.groups") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotPermitted(endpoint_url, [None, self.user, self.agent]) + + frank = self.create_contact("Frank", urns=["facebook:123456"]) + self.create_field("isdeveloper", "Is developer") + open_tickets = self.org.groups.get(name="Open Tickets") + customers = self.create_group("Customers", [frank]) + developers = self.create_group("Developers", query='isdeveloper = "YES"') + ContactGroup.objects.filter(id=developers.id).update(status=ContactGroup.STATUS_READY) + + dynamic = self.create_group("Big Group", query='isdeveloper = "NO"') + ContactGroup.objects.filter(id=dynamic.id).update(status=ContactGroup.STATUS_EVALUATING) + + # an initializing group + ContactGroup.create_manual(self.org, self.admin, "Initializing", status=ContactGroup.STATUS_INITIALIZING) + + # group belongs to other org + spammers = ContactGroup.get_or_create(self.org2, self.admin2, "Spammers") + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "uuid": dynamic.uuid, + "name": "Big Group", + "query": 'isdeveloper = "NO"', + "status": "evaluating", + "system": False, + "count": 0, + }, + { + "uuid": developers.uuid, + "name": "Developers", + "query": 'isdeveloper = "YES"', + "status": "ready", + "system": False, + "count": 0, + }, + { + "uuid": customers.uuid, + "name": "Customers", + "query": None, + "status": "ready", + "system": False, + "count": 1, + }, + { + "uuid": open_tickets.uuid, + "name": "Open Tickets", + "query": "tickets > 0", + "status": "ready", + "system": True, + "count": 0, + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={customers.uuid}", [self.editor], results=[customers]) + + # filter by name + self.assertGet(endpoint_url + "?name=developers", [self.editor], results=[developers]) + + # try to filter by both + self.assertGet( + endpoint_url + f"?uuid={customers.uuid}&name=developers", + [self.editor], + errors={None: "You may only specify one of the uuid, name parameters"}, + ) + + # try to create empty group + self.assertPost(endpoint_url, self.admin, {}, errors={"name": "This field is required."}) + + # create new group + response = self.assertPost(endpoint_url, self.admin, {"name": "Reporters"}, status=201) + + reporters = ContactGroup.objects.get(name="Reporters") + self.assertEqual( + response.json(), + { + "uuid": reporters.uuid, + "name": "Reporters", + "query": None, + "status": "ready", + "system": False, + "count": 0, + }, + ) + + # try to create another group with same name + self.assertPost(endpoint_url, self.admin, {"name": "reporters"}, errors={"name": "This field must be unique."}) + + # it's fine if a group in another org has that name + self.assertPost(endpoint_url, self.admin, {"name": "Spammers"}, status=201) + + # try to create a group with invalid name + self.assertPost( + endpoint_url, self.admin, {"name": '"People"'}, errors={"name": 'Cannot contain the character: "'} + ) + + # try to create a group with name that's too long + self.assertPost( + endpoint_url, + self.admin, + {"name": "x" * 65}, + errors={"name": "Ensure this field has no more than 64 characters."}, + ) + + # update group by UUID 
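+ # (a POST with a uuid query param updates that existing group rather than creating a new one)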
self.assertPost(endpoint_url + f"?uuid={reporters.uuid}", self.admin, {"name": "U-Reporters"}) + + reporters.refresh_from_db() + self.assertEqual(reporters.name, "U-Reporters") + + # can't update a system group + self.assertPost( + endpoint_url + f"?uuid={open_tickets.uuid}", + self.admin, + {"name": "Won't work"}, + errors={None: "Cannot modify system object."}, + status=403, + ) + self.assertTrue(self.org.groups.filter(name="Open Tickets").exists()) + + # can't update a group from other org + self.assertPost(endpoint_url + f"?uuid={spammers.uuid}", self.admin, {"name": "Won't work"}, status=404) + + # try an empty delete request + self.assertDelete( + endpoint_url, self.admin, errors={None: "URL must contain one of the following parameters: uuid"} + ) + + # delete a group by UUID + self.assertDelete(endpoint_url + f"?uuid={reporters.uuid}", self.admin, status=204) + + reporters.refresh_from_db() + self.assertFalse(reporters.is_active) + + # can't delete a system group + self.assertDelete( + endpoint_url + f"?uuid={open_tickets.uuid}", + self.admin, + errors={None: "Cannot delete system object."}, + status=403, + ) + self.assertTrue(self.org.groups.filter(name="Open Tickets").exists()) + + # can't delete a group with a trigger dependency + trigger = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + self.create_flow("Test"), + keywords=["block_group"], + match_type=Trigger.MATCH_FIRST_WORD, + ) + trigger.groups.add(customers) + + self.assertDelete( + endpoint_url + f"?uuid={customers.uuid}", + self.admin, + errors={None: "Group is being used by triggers which must be archived first."}, + status=400, + ) + + # or a campaign dependency + trigger.groups.clear() + campaign = Campaign.create(self.org, self.admin, "Reminders", customers) + + self.assertDelete( + endpoint_url + f"?uuid={customers.uuid}", + self.admin, + errors={None: "Group is being used by campaigns which must be archived first."}, + status=400, + ) + + # can't delete a group in another org + self.assertDelete(endpoint_url + f"?uuid={spammers.uuid}", self.admin, status=404) + + campaign.delete() + for group in ContactGroup.objects.filter(is_system=False): + group.release(self.admin) + + for i in range(10): + ContactGroup.create_manual(self.org2, self.admin2, "group%d" % i) + + self.assertPost(endpoint_url, self.admin, {"name": "Reporters"}, status=201) + + ContactGroup.objects.filter(is_system=False, is_active=True).delete() + + for i in range(10): + ContactGroup.create_manual(self.org, self.admin, "group%d" % i) + + self.assertPost( + endpoint_url, + self.admin, + {"name": "Reporters"}, + errors={None: "Cannot create object because workspace has reached limit of 10."}, + status=409, + ) diff --git a/temba/api/v2/tests/test_labels.py b/temba/api/v2/tests/test_labels.py new file mode 100644 index 00000000000..8e7e176bc8a --- /dev/null +++ b/temba/api/v2/tests/test_labels.py @@ -0,0 +1,112 @@ +from django.test import override_settings +from django.urls import reverse + +from temba.msgs.models import Label + +from . 
import APITest + + +class LabelsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.labels") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotPermitted(endpoint_url + "?uuid=123", [None, self.user, self.agent]) + + frank = self.create_contact("Frank", urns=["tel:123456"]) + important = self.create_label("Important") + feedback = self.create_label("Feedback") + + # a deleted label + deleted = self.create_label("Deleted") + deleted.release(self.admin) + + # create label for other org + spam = self.create_label("Spam", org=self.org2) + + msg = self.create_incoming_msg(frank, "Hello") + important.toggle_label([msg], add=True) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + {"uuid": str(feedback.uuid), "name": "Feedback", "count": 0}, + {"uuid": str(important.uuid), "name": "Important", "count": 1}, + ], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by UUID + self.assertGet(endpoint_url + f"?uuid={feedback.uuid}", [self.editor], results=[feedback]) + + # filter by name + self.assertGet(endpoint_url + "?name=important", [self.editor], results=[important]) + + # try to filter by both + self.assertGet( + endpoint_url + f"?uuid={important.uuid}&name=important", + [self.editor], + errors={None: "You may only specify one of the uuid, name parameters"}, + ) + + # try to create empty label + self.assertPost(endpoint_url, self.editor, {}, errors={"name": "This field is required."}) + + # create new label + response = self.assertPost(endpoint_url, self.editor, {"name": "Interesting"}, status=201) + + interesting = Label.objects.get(name="Interesting") + self.assertEqual(response.json(), {"uuid": str(interesting.uuid), "name": "Interesting", "count": 0}) + + # try to create another label with same name + self.assertPost( + endpoint_url, self.admin, {"name": "interesting"}, errors={"name": "This field must be unique."} + ) + + # it's fine if a label in another org has that name + self.assertPost(endpoint_url, self.admin, {"name": "Spam"}, status=201) + + # try to create a label with invalid name + self.assertPost(endpoint_url, self.admin, {"name": '""'}, errors={"name": 'Cannot contain the character: "'}) + + # try to create a label with name that's too long + self.assertPost( + endpoint_url, + self.admin, + {"name": "x" * 65}, + errors={"name": "Ensure this field has no more than 64 characters."}, + ) + + # update label by UUID + response = self.assertPost(endpoint_url + f"?uuid={interesting.uuid}", self.admin, {"name": "More Interesting"}) + interesting.refresh_from_db() + self.assertEqual(interesting.name, "More Interesting") + + # can't update label from other org + self.assertPost(endpoint_url + f"?uuid={spam.uuid}", self.admin, {"name": "Won't work"}, status=404) + + # try an empty delete request + self.assertDelete( + endpoint_url, self.admin, errors={None: "URL must contain one of the following parameters: uuid"} + ) + + # delete a label by UUID + self.assertDelete(endpoint_url + f"?uuid={interesting.uuid}", self.admin) + interesting.refresh_from_db() + self.assertFalse(interesting.is_active) + + # try to delete a label in another org + self.assertDelete(endpoint_url + f"?uuid={spam.uuid}", self.admin, status=404) + + # try creating a new label after reaching the limit on labels + with override_settings(ORG_LIMIT_DEFAULTS={"labels": 
self.org.msgs_labels.filter(is_active=True).count()}): + self.assertPost( + endpoint_url, + self.admin, + {"name": "Interesting"}, + errors={None: "Cannot create object because workspace has reached limit of 3."}, + status=409, + ) diff --git a/temba/api/v2/tests/test_media.py b/temba/api/v2/tests/test_media.py new file mode 100644 index 00000000000..869c6544b44 --- /dev/null +++ b/temba/api/v2/tests/test_media.py @@ -0,0 +1,51 @@ +from unittest.mock import patch + +from django.conf import settings +from django.urls import reverse + +from temba.msgs.models import Media +from temba.tests import mock_uuids + +from . import APITest + + +class MediaEndpointTest(APITest): + @mock_uuids + def test_endpoint(self): + endpoint_url = reverse("api.v2.media") + ".json" + + self.assertGetNotAllowed(endpoint_url) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + def upload(user, filename: str): + self.login(user) + with open(filename, "rb") as data: + return self.client.post(endpoint_url, {"file": data}, HTTP_X_FORWARDED_HTTPS="https") + + self.login(self.admin) + response = self.client.post(endpoint_url, {}, HTTP_X_FORWARDED_HTTPS="https") + self.assertResponseError(response, "file", "No file was submitted.") + + response = upload(self.agent, f"{settings.MEDIA_ROOT}/test_imports/simple.xlsx") + self.assertResponseError(response, "file", "Unsupported file type.") + + with patch("temba.msgs.models.Media.MAX_UPLOAD_SIZE", 1024): + response = upload(self.editor, f"{settings.MEDIA_ROOT}/test_media/snow.mp4") + self.assertResponseError(response, "file", "Limit for file uploads is 0.0009765625 MB.") + + response = upload(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") + self.assertEqual(201, response.status_code) + self.assertEqual( + { + "uuid": "b97f69f7-5edf-45c7-9fda-d37066eae91d", + "content_type": "image/jpeg", + "url": f"{settings.STORAGE_URL}/orgs/{self.org.id}/media/b97f/b97f69f7-5edf-45c7-9fda-d37066eae91d/steve%20marten.jpg", + "filename": "steve marten.jpg", + "size": 7461, + }, + response.json(), + ) + + media = Media.objects.get() + self.assertEqual(Media.STATUS_READY, media.status) diff --git a/temba/api/v2/tests/test_message_actions.py b/temba/api/v2/tests/test_message_actions.py new file mode 100644 index 00000000000..c55289884b9 --- /dev/null +++ b/temba/api/v2/tests/test_message_actions.py @@ -0,0 +1,178 @@ +from django.urls import reverse + +from temba.msgs.models import Label, Msg + +from . 
import APITest + + +class MessageActionsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.message_actions") + ".json" + + self.assertGetNotAllowed(endpoint_url) + self.assertPostNotPermitted(endpoint_url, [None, self.user, self.agent]) + self.assertDeleteNotAllowed(endpoint_url) + + # create some messages to act on + joe = self.create_contact("Joe Blow", phone="+250788123123") + msg1 = self.create_incoming_msg(joe, "Msg #1") + msg2 = self.create_incoming_msg(joe, "Msg #2") + msg3 = self.create_incoming_msg(joe, "Msg #3") + label = self.create_label("Test") + + # add label by name to messages 1 and 2 + self.assertPost( + endpoint_url, self.editor, {"messages": [msg1.id, msg2.id], "action": "label", "label": "Test"}, status=204 + ) + self.assertEqual(set(label.get_messages()), {msg1, msg2}) + + # add label by its UUID to message 3 + self.assertPost( + endpoint_url, self.admin, {"messages": [msg3.id], "action": "label", "label": str(label.uuid)}, status=204 + ) + self.assertEqual(set(label.get_messages()), {msg1, msg2, msg3}) + + # try to label with an invalid UUID + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id], "action": "label", "label": "nope"}, + errors={"label": "No such object: nope"}, + ) + + # remove label from message 2 by name (which is case-insensitive) + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg2.id], "action": "unlabel", "label": "test"}, + status=204, + ) + self.assertEqual(set(label.get_messages()), {msg1, msg3}) + + # and remove from messages 1 and 3 by UUID + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id, msg3.id], "action": "unlabel", "label": str(label.uuid)}, + status=204, + ) + self.assertEqual(set(label.get_messages()), set()) + + # add new label via label_name + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg2.id, msg3.id], "action": "label", "label_name": "New"}, + status=204, + ) + new_label = Label.objects.get(org=self.org, name="New", is_active=True) + self.assertEqual(set(new_label.get_messages()), {msg2, msg3}) + + # no difference if label already exists as it does now + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id], "action": "label", "label_name": "New"}, + status=204, + ) + self.assertEqual(set(new_label.get_messages()), {msg1, msg2, msg3}) + + # can also remove by label_name + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg3.id], "action": "unlabel", "label_name": "New"}, + status=204, + ) + self.assertEqual(set(new_label.get_messages()), {msg1, msg2}) + + # and no error if label doesn't exist + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg3.id], "action": "unlabel", "label_name": "XYZ"}, + status=204, + ) + # and the label is not lazily created in this case + self.assertIsNone(Label.objects.filter(name="XYZ").first()) + + # try to use invalid label name + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id, msg2.id], "action": "label", "label_name": '"Hi"'}, + errors={"label_name": 'Cannot contain the character: "'}, + ) + + # try to label without specifying a label + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id, msg2.id], "action": "label"}, + errors={"non_field_errors": 'For action "label" you should also specify a label'}, + ) + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id, msg2.id], "action": "label", "label": ""}, + errors={"label": "This field may not be null."}, + ) + + # try to provide both label 
and label_name + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id], "action": "label", "label": "Test", "label_name": "Test"}, + errors={"non_field_errors": "Can't specify both label and label_name."}, + ) + + # archive all messages + self.assertPost( + endpoint_url, self.admin, {"messages": [msg1.id, msg2.id, msg3.id], "action": "archive"}, status=204 + ) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg1, msg2, msg3}) + + # restore message 1 + self.assertPost(endpoint_url, self.admin, {"messages": [msg1.id], "action": "restore"}, status=204) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1}) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg2, msg3}) + + # delete message 2 + self.assertPost(endpoint_url, self.admin, {"messages": [msg2.id], "action": "delete"}, status=204) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1}) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_ARCHIVED)), {msg3}) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_DELETED_BY_USER)), {msg2}) + + # try to act on a valid message and a deleted message + response = self.assertPost( + endpoint_url, self.admin, {"messages": [msg2.id, msg3.id], "action": "restore"}, status=200 + ) + + # should get a partial success + self.assertEqual(response.json(), {"failures": [msg2.id]}) + self.assertEqual(set(Msg.objects.filter(visibility=Msg.VISIBILITY_VISIBLE)), {msg1, msg3}) + + # try to act on an outgoing message + msg4 = self.create_outgoing_msg(joe, "Hi Joe") + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id, msg4.id], "action": "archive"}, + errors={"messages": f"Not an incoming message: {msg4.id}"}, + ) + + # try to provide a label for a non-labelling action + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id], "action": "archive", "label": "Test"}, + errors={"non_field_errors": 'For action "archive" you should not specify a label'}, + ) + + # try to invoke an invalid action + self.assertPost( + endpoint_url, + self.admin, + {"messages": [msg1.id], "action": "like"}, + errors={"action": '"like" is not a valid choice.'}, + ) diff --git a/temba/api/v2/tests/test_messages.py b/temba/api/v2/tests/test_messages.py new file mode 100644 index 00000000000..a75e5d6c701 --- /dev/null +++ b/temba/api/v2/tests/test_messages.py @@ -0,0 +1,290 @@ +from unittest.mock import call + +from django.conf import settings +from django.urls import reverse +from django.utils import timezone + +from temba.api.v2.serializers import format_datetime +from temba.msgs.models import Msg +from temba.tests import mock_mailroom + +from . 
import APITest + + +class MessagesEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.messages") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["facebook:123456"]) + + hans = self.create_contact("Hans Gruber", phone="+4921551511", org=self.org2) + self.create_channel("A", "Org2Channel", "123456", country="RW", org=self.org2) + + # create some messages + facebook = self.create_channel("FBA", "Facebook Channel", "billy_bob") + flow = self.create_flow("Test") + joe_msg1 = self.create_incoming_msg(joe, "Howdy", flow=flow) + frank_msg1 = self.create_incoming_msg(frank, "Bonjour", channel=facebook) + joe_msg2 = self.create_outgoing_msg(joe, "How are you?", status="Q") + frank_msg2 = self.create_outgoing_msg(frank, "Ça va?", status="D") + joe_msg3 = self.create_incoming_msg( + joe, "Good", flow=flow, attachments=["image/jpeg:https://example.com/test.jpg"] + ) + frank_msg3 = self.create_incoming_msg(frank, "Bien", channel=facebook, visibility="A") + frank_msg4 = self.create_outgoing_msg(frank, "Ça va?", status="F") + + # add a failed message with no URN or channel + joe_msg4 = self.create_outgoing_msg(joe, "Sorry", failed_reason=Msg.FAILED_NO_DESTINATION) + + # add an unhandled message + self.create_incoming_msg(joe, "Just in!", status="P") + + # add a deleted message + deleted_msg = self.create_incoming_msg(frank, "!@$!%", visibility="D") + + # add a message in other org + self.create_incoming_msg(hans, "Guten tag!", channel=None) + + # label some of the messages; this will also change their modified_on, which matters for our `incoming` view + label = self.create_label("Spam") + + # we do this in two calls so that we can predict ordering later + label.toggle_label([frank_msg3], add=True) + label.toggle_label([frank_msg1], add=True) + label.toggle_label([joe_msg3], add=True) + + frank_msg1.refresh_from_db(fields=("modified_on",)) + joe_msg3.refresh_from_db(fields=("modified_on",)) + + # give this message a later sent_on than the other sent message created before it, to check ordering of sent messages + frank_msg2.sent_on = timezone.now() + frank_msg2.save(update_fields=("sent_on",)) + + # default response is all messages sorted by created_on + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin], + results=[joe_msg4, frank_msg4, frank_msg3, joe_msg3, frank_msg2, joe_msg2, frank_msg1, joe_msg1], + num_queries=self.BASE_SESSION_QUERIES + 6, + ) + + # filter by inbox + self.assertGet( + endpoint_url + "?folder=INBOX", + [self.admin], + results=[ + { + "id": frank_msg1.id, + "type": "text", + "channel": {"uuid": str(facebook.uuid), "name": "Facebook Channel"}, + "contact": {"uuid": str(frank.uuid), "name": "Frank"}, + "urn": "facebook:123456", + "text": "Bonjour", + "attachments": [], + "archived": False, + "broadcast": None, + "created_on": format_datetime(frank_msg1.created_on), + "direction": "in", + "flow": None, + "labels": [{"uuid": str(label.uuid), "name": "Spam"}], + "media": None, + "modified_on": format_datetime(frank_msg1.modified_on), + "sent_on": None, + "status": "handled", + "visibility": "visible", + } + ], + num_queries=self.BASE_SESSION_QUERIES + 5, + ) + + # filter by incoming, should get deleted messages too + self.assertGet( + endpoint_url + "?folder=incoming", + [self.admin], + 
results=[joe_msg3, frank_msg1, frank_msg3, deleted_msg, joe_msg1], + ) + + # filter by other folders.. + self.assertGet(endpoint_url + "?folder=flows", [self.admin], results=[joe_msg3, joe_msg1]) + self.assertGet(endpoint_url + "?folder=archived", [self.admin], results=[frank_msg3]) + self.assertGet(endpoint_url + "?folder=outbox", [self.admin], results=[joe_msg2]) + self.assertGet(endpoint_url + "?folder=sent", [self.admin], results=[frank_msg2]) + self.assertGet(endpoint_url + "?folder=failed", [self.admin], results=[joe_msg4, frank_msg4]) + + # filter by invalid folder + self.assertGet(endpoint_url + "?folder=invalid", [self.admin], results=[]) + + # filter by id + self.assertGet(endpoint_url + f"?id={joe_msg3.id}", [self.admin], results=[joe_msg3]) + + # filter by contact + self.assertGet( + endpoint_url + f"?contact={joe.uuid}", [self.admin], results=[joe_msg4, joe_msg3, joe_msg2, joe_msg1] + ) + + # filter by invalid contact + self.assertGet(endpoint_url + "?contact=invalid", [self.admin], results=[]) + + # filter by label UUID / name + self.assertGet(endpoint_url + f"?label={label.uuid}", [self.admin], results=[frank_msg3, joe_msg3, frank_msg1]) + self.assertGet(endpoint_url + "?label=Spam", [self.admin], results=[frank_msg3, joe_msg3, frank_msg1]) + + # filter by invalid label + self.assertGet(endpoint_url + "?label=invalid", [self.admin], results=[]) + + # filter by before (inclusive) + self.assertGet( + endpoint_url + f"?folder=incoming&before={format_datetime(frank_msg1.modified_on)}", + [self.editor], + results=[frank_msg1, frank_msg3, deleted_msg, joe_msg1], + ) + + # filter by after (inclusive) + self.assertGet( + endpoint_url + f"?folder=incoming&after={format_datetime(frank_msg1.modified_on)}", + [self.editor], + results=[joe_msg3, frank_msg1], + ) + + # filter by broadcast + broadcast = self.create_broadcast(self.user, {"eng": {"text": "A beautiful broadcast"}}, contacts=[joe, frank]) + self.assertGet( + endpoint_url + f"?broadcast={broadcast.id}", + [self.editor], + results=broadcast.msgs.order_by("-id"), + ) + + # can't filter with invalid id + self.assertGet(endpoint_url + "?id=xyz", [self.editor], errors={None: "Value for id must be an integer"}) + + # can't filter by more than one of contact, folder, label or broadcast together + for query in ( + f"?contact={joe.uuid}&label=Spam", + "?label=Spam&folder=inbox", + "?broadcast=12345&folder=inbox", + "?broadcast=12345&label=Spam", + ): + self.assertGet( + endpoint_url + query, + [self.editor], + errors={None: "You may only specify one of the contact, folder, label, broadcast parameters"}, + ) + + with self.anonymous(self.org): + # for anon orgs, don't return URN values + response = self.assertGet(endpoint_url + f"?id={joe_msg3.id}", [self.admin], results=[joe_msg3]) + self.assertIsNone(response.json()["results"][0]["urn"]) + + # try to create a message with empty request + self.assertPost(endpoint_url, self.admin, {}, errors={"contact": "This field is required."}) + + # try to create empty message + self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid}, + errors={"non_field_errors": "Must provide either text or attachments."}, + ) + + # create a new message with just text - which shouldn't need to read anything about the msg from the db + response = self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid, "text": "Interesting"}, + status=201, + ) + + msg = Msg.objects.order_by("id").last() + self.assertEqual( + { + "id": msg.id, + "type": "text", + "channel": {"uuid": str(self.channel.uuid), 
"name": "Test Channel"}, + "contact": {"uuid": str(joe.uuid), "name": "Joe Blow"}, + "urn": "tel:+250788123123", + "text": "Interesting", + "attachments": [], + "archived": False, + "broadcast": None, + "created_on": format_datetime(msg.created_on), + "direction": "out", + "flow": None, + "labels": [], + "media": None, + "modified_on": format_datetime(msg.modified_on), + "sent_on": None, + "status": "queued", + "visibility": "visible", + }, + response.json(), + ) + + self.assertEqual( + call(self.org, self.admin, joe, "Interesting", [], None), + mr_mocks.calls["msg_send"][-1], + ) + + # try to create a message with an invalid attachment media UUID + self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid, "text": "Hi", "attachments": ["xxxx"]}, + errors={"attachments": "No such object: xxxx"}, + ) + + # try to create a message with an non-existent attachment media UUID + self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid, "text": "Hi", "attachments": ["67ffe746-8771-40fb-89c1-5388e7ddd439"]}, + errors={"attachments": "No such object: 67ffe746-8771-40fb-89c1-5388e7ddd439"}, + ) + + upload = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") + + # create a new message with an attachment as the media UUID... + self.assertPost(endpoint_url, self.admin, {"contact": joe.uuid, "attachments": [str(upload.uuid)]}, status=201) + self.assertEqual( # check that was sent via mailroom + call(self.org, self.admin, joe, "", [f"image/jpeg:{upload.url}"], None), + mr_mocks.calls["msg_send"][-1], + ) + + # create a new message with an attachment as :... + self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid, "attachments": [f"image/jpeg:https://example.com/{upload.uuid}.jpg"]}, + status=201, + ) + self.assertEqual( + call(self.org, self.admin, joe, "", [f"image/jpeg:{upload.url}"], None), + mr_mocks.calls["msg_send"][-1], + ) + + # try to create a message with too many attachments + self.assertPost( + endpoint_url, + self.admin, + {"contact": joe.uuid, "attachments": [str(upload.uuid)] * 11}, + errors={"attachments": "Ensure this field has no more than 10 elements."}, + ) + + # try to create an unsendable message + billy_no_phone = self.create_contact("Billy", urns=[]) + response = self.assertPost( + endpoint_url, + self.admin, + {"contact": billy_no_phone.uuid, "text": "well?"}, + status=201, + ) + + msg_json = response.json() + self.assertIsNone(msg_json["channel"]) + self.assertIsNone(msg_json["urn"]) + self.assertEqual("failed", msg_json["status"]) diff --git a/temba/api/v2/tests/test_optins.py b/temba/api/v2/tests/test_optins.py new file mode 100644 index 00000000000..4b9e8696e49 --- /dev/null +++ b/temba/api/v2/tests/test_optins.py @@ -0,0 +1,73 @@ +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.msgs.models import OptIn +from temba.tests import matchers + +from . 
import APITest + + +class OptInEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.optins") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + # create some optins + polls = OptIn.create(self.org, self.admin, "Polls") + offers = OptIn.create(self.org, self.admin, "Offers") + OptIn.create(self.org2, self.admin, "Promos") + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "uuid": str(offers.uuid), + "name": "Offers", + "created_on": format_datetime(offers.created_on), + }, + { + "uuid": str(polls.uuid), + "name": "Polls", + "created_on": format_datetime(polls.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) + + # try to create empty optin + self.assertPost(endpoint_url, self.admin, {}, errors={"name": "This field is required."}) + + # create new optin + response = self.assertPost(endpoint_url, self.admin, {"name": "Alerts"}, status=201) + + alerts = OptIn.objects.get(name="Alerts") + self.assertEqual( + response.json(), + { + "uuid": str(alerts.uuid), + "name": "Alerts", + "created_on": matchers.ISODate(), + }, + ) + + # try to create another optin with same name + self.assertPost(endpoint_url, self.admin, {"name": "Alerts"}, errors={"name": "This field must be unique."}) + + # it's fine if an optin in another org has that name + self.assertPost(endpoint_url, self.editor, {"name": "Promos"}, status=201) + + # try to create an optin with invalid name + self.assertPost(endpoint_url, self.admin, {"name": '"Hi"'}, errors={"name": 'Cannot contain the character: "'}) + + # try to create an optin with name that's too long + self.assertPost( + endpoint_url, + self.admin, + {"name": "x" * 65}, + errors={"name": "Ensure this field has no more than 64 characters."}, + ) diff --git a/temba/api/v2/tests/test_resthooks.py b/temba/api/v2/tests/test_resthooks.py new file mode 100644 index 00000000000..771fc818bb3 --- /dev/null +++ b/temba/api/v2/tests/test_resthooks.py @@ -0,0 +1,157 @@ +from django.urls import reverse + +from temba.api.models import Resthook, WebHookEvent +from temba.api.v2.serializers import format_datetime + +from . 
import APITest + + +class ResthooksEndpointTest(APITest): + def test_endpoint(self): + hooks_url = reverse("api.v2.resthooks") + ".json" + subs_url = reverse("api.v2.resthook_subscribers") + ".json" + events_url = reverse("api.v2.resthook_events") + ".json" + + self.assertGetNotPermitted(hooks_url, [None, self.agent, self.user]) + self.assertPostNotAllowed(hooks_url) + self.assertDeleteNotAllowed(hooks_url) + + self.assertGetNotPermitted(subs_url, [None, self.agent, self.user]) + self.assertPostNotPermitted(subs_url, [None, self.agent, self.user]) + self.assertDeleteNotPermitted(subs_url, [None, self.agent, self.user]) + + self.assertGetNotPermitted(events_url, [None, self.agent, self.user]) + self.assertPostNotAllowed(events_url) + self.assertDeleteNotAllowed(events_url) + + # create some resthooks + resthook1 = Resthook.get_or_create(self.org, "new-mother", self.admin) + resthook2 = Resthook.get_or_create(self.org, "new-father", self.admin) + resthook3 = Resthook.get_or_create(self.org, "not-active", self.admin) + resthook3.is_active = False + resthook3.save() + + # create a resthook for another org + other_org_resthook = Resthook.get_or_create(self.org2, "spam", self.admin2) + + # fetch hooks with no filtering + self.assertGet( + hooks_url, + [self.editor, self.admin], + results=[ + { + "resthook": "new-father", + "created_on": format_datetime(resthook2.created_on), + "modified_on": format_datetime(resthook2.modified_on), + }, + { + "resthook": "new-mother", + "created_on": format_datetime(resthook1.created_on), + "modified_on": format_datetime(resthook1.modified_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) + + # try to create empty subscription + self.assertPost( + subs_url, + self.admin, + {}, + errors={"resthook": "This field is required.", "target_url": "This field is required."}, + ) + + # try to create one for resthook in other org + self.assertPost( + subs_url, + self.admin, + {"resthook": "spam", "target_url": "https://foo.bar/"}, + errors={"resthook": "No resthook with slug: spam"}, + ) + + # create subscribers on each resthook + self.assertPost( + subs_url, self.editor, {"resthook": "new-mother", "target_url": "https://foo.bar/mothers"}, status=201 + ) + self.assertPost( + subs_url, self.admin, {"resthook": "new-father", "target_url": "https://foo.bar/fathers"}, status=201 + ) + + hook1_subscriber = resthook1.subscribers.get() + hook2_subscriber = resthook2.subscribers.get() + + # create a subscriber on our other resthook + other_org_subscriber = other_org_resthook.add_subscriber("https://bar.foo", self.admin2) + + # fetch subscribers with no filtering + self.assertGet( + subs_url, + [self.editor, self.admin], + results=[ + { + "id": hook2_subscriber.id, + "resthook": "new-father", + "target_url": "https://foo.bar/fathers", + "created_on": format_datetime(hook2_subscriber.created_on), + }, + { + "id": hook1_subscriber.id, + "resthook": "new-mother", + "target_url": "https://foo.bar/mothers", + "created_on": format_datetime(hook1_subscriber.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) + + # filter by id + self.assertGet(subs_url + f"?id={hook1_subscriber.id}", [self.editor], results=[hook1_subscriber]) + + # filter by resthook + self.assertGet(subs_url + "?resthook=new-father", [self.editor], results=[hook2_subscriber]) + + # remove a subscriber + self.assertDelete(subs_url + f"?id={hook2_subscriber.id}", self.admin) + + # subscriber should no longer be active + hook2_subscriber.refresh_from_db() + 
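+ # note: subscriber deletes are soft; the row remains with is_active=False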
self.assertFalse(hook2_subscriber.is_active) + + # try to delete without providing id + self.assertDelete( + subs_url + "?", self.editor, errors={None: "URL must contain one of the following parameters: id"} + ) + + # try to delete a subscriber from another org + self.assertDelete(subs_url + f"?id={other_org_subscriber.id}", self.editor, status=404) + + # create some events on our resthooks + event1 = WebHookEvent.objects.create( + org=self.org, + resthook=resthook1, + data={"event": "new mother", "values": {"name": "Greg"}, "steps": {"uuid": "abcde"}}, + ) + event2 = WebHookEvent.objects.create( + org=self.org, + resthook=resthook2, + data={"event": "new father", "values": {"name": "Yo"}, "steps": {"uuid": "12345"}}, + ) + + # fetch events with no filtering + self.assertGet( + events_url, + [self.editor, self.admin], + results=[ + { + "resthook": "new-father", + "created_on": format_datetime(event2.created_on), + "data": {"event": "new father", "values": {"name": "Yo"}, "steps": {"uuid": "12345"}}, + }, + { + "resthook": "new-mother", + "created_on": format_datetime(event1.created_on), + "data": {"event": "new mother", "values": {"name": "Greg"}, "steps": {"uuid": "abcde"}}, + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 1, + ) diff --git a/temba/api/v2/tests/test_runs.py b/temba/api/v2/tests/test_runs.py new file mode 100644 index 00000000000..9ddfe7e502f --- /dev/null +++ b/temba/api/v2/tests/test_runs.py @@ -0,0 +1,294 @@ +import iso8601 + +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.flows.models import FlowStart +from temba.tests.engine import MockSessionWriter + +from . import APITest + + +class RunsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.runs") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None, self.agent]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + flow1 = self.get_flow("color_v13") + flow2 = flow1.clone(self.user) + + flow1_nodes = flow1.get_definition()["nodes"] + color_prompt = flow1_nodes[0] + color_split = flow1_nodes[4] + blue_reply = flow1_nodes[2] + + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["tel:123456"]) + start1 = FlowStart.create(flow1, self.admin, contacts=[joe]) + joe_msg = self.create_incoming_msg(joe, "it is blue") + frank_msg = self.create_incoming_msg(frank, "Indigo") + + joe_run1 = ( + MockSessionWriter(joe, flow1, start=start1) + .visit(color_prompt) + .visit(color_split) + .wait() + .resume(msg=joe_msg) + .set_result("Color", "blue", "Blue", "it is blue") + .visit(blue_reply) + .complete() + .save() + ).session.runs.get() + + frank_run1 = ( + MockSessionWriter(frank, flow1) + .visit(color_prompt) + .visit(color_split) + .wait() + .resume(msg=frank_msg) + .set_result("Color", "Indigo", "Other", "Indigo") + .wait() + .save() + ).session.runs.get() + + joe_run2 = ( + MockSessionWriter(joe, flow1).visit(color_prompt).visit(color_split).wait().save() + ).session.runs.get() + frank_run2 = ( + MockSessionWriter(frank, flow1).visit(color_prompt).visit(color_split).wait().save() + ).session.runs.get() + + joe_run3 = MockSessionWriter(joe, flow2).wait().save().session.runs.get() + + # add a run for another org + flow3 = self.create_flow("Test", org=self.org2) + hans = self.create_contact("Hans Gruber", phone="+4921551511", org=self.org2) + MockSessionWriter(hans, flow3).wait().save() + + # refresh runs which will have been modified by being 
interrupted + joe_run1.refresh_from_db() + joe_run2.refresh_from_db() + frank_run1.refresh_from_db() + frank_run2.refresh_from_db() + + # no filtering + response = self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[joe_run3, joe_run2, frank_run2, frank_run1, joe_run1], + num_queries=self.BASE_SESSION_QUERIES + 6, + ) + resp_json = response.json() + self.assertEqual( + { + "id": frank_run2.id, + "uuid": str(frank_run2.uuid), + "flow": {"uuid": str(flow1.uuid), "name": "Colors"}, + "contact": { + "uuid": str(frank.uuid), + "name": frank.name, + "urn": "tel:123456", + "urn_display": "123456", + }, + "start": None, + "responded": False, + "path": [ + { + "node": color_prompt["uuid"], + "time": format_datetime(iso8601.parse_date(frank_run2.path[0]["arrived_on"])), + }, + { + "node": color_split["uuid"], + "time": format_datetime(iso8601.parse_date(frank_run2.path[1]["arrived_on"])), + }, + ], + "values": {}, + "created_on": format_datetime(frank_run2.created_on), + "modified_on": format_datetime(frank_run2.modified_on), + "exited_on": None, + "exit_type": None, + }, + resp_json["results"][2], + ) + self.assertEqual( + { + "id": joe_run1.id, + "uuid": str(joe_run1.uuid), + "flow": {"uuid": str(flow1.uuid), "name": "Colors"}, + "contact": { + "uuid": str(joe.uuid), + "name": joe.name, + "urn": "tel:+250788123123", + "urn_display": "0788 123 123", + }, + "start": {"uuid": str(joe_run1.start.uuid)}, + "responded": True, + "path": [ + { + "node": color_prompt["uuid"], + "time": format_datetime(iso8601.parse_date(joe_run1.path[0]["arrived_on"])), + }, + { + "node": color_split["uuid"], + "time": format_datetime(iso8601.parse_date(joe_run1.path[1]["arrived_on"])), + }, + { + "node": blue_reply["uuid"], + "time": format_datetime(iso8601.parse_date(joe_run1.path[2]["arrived_on"])), + }, + ], + "values": { + "color": { + "value": "blue", + "category": "Blue", + "node": color_split["uuid"], + "time": format_datetime(iso8601.parse_date(joe_run1.results["color"]["created_on"])), + "name": "Color", + } + }, + "created_on": format_datetime(joe_run1.created_on), + "modified_on": format_datetime(joe_run1.modified_on), + "exited_on": format_datetime(joe_run1.exited_on), + "exit_type": "completed", + }, + resp_json["results"][4], + ) + + # can request without path data + response = self.assertGet( + endpoint_url + "?paths=false", [self.editor], results=[joe_run3, joe_run2, frank_run2, frank_run1, joe_run1] + ) + resp_json = response.json() + self.assertEqual( + { + "id": frank_run2.id, + "uuid": str(frank_run2.uuid), + "flow": {"uuid": str(flow1.uuid), "name": "Colors"}, + "contact": { + "uuid": str(frank.uuid), + "name": frank.name, + "urn": "tel:123456", + "urn_display": "123456", + }, + "start": None, + "responded": False, + "path": None, + "values": {}, + "created_on": format_datetime(frank_run2.created_on), + "modified_on": format_datetime(frank_run2.modified_on), + "exited_on": None, + "exit_type": None, + }, + resp_json["results"][2], + ) + + # reversed + self.assertGet( + endpoint_url + "?reverse=true", + [self.editor], + results=[joe_run1, frank_run1, frank_run2, joe_run2, joe_run3], + ) + + # filter by id + self.assertGet(endpoint_url + f"?id={frank_run2.id}", [self.admin], results=[frank_run2]) + + # anon orgs should not have a URN field + with self.anonymous(self.org): + response = self.assertGet(endpoint_url + f"?id={frank_run2.id}", [self.admin], results=[frank_run2]) + self.assertEqual( + { + "id": frank_run2.pk, + "uuid": str(frank_run2.uuid), + "flow": {"uuid": flow1.uuid, 
"name": "Colors"}, + "contact": { + "uuid": frank.uuid, + "name": frank.name, + "urn": "tel:********", + "urn_display": None, + "anon_display": f"{frank.id:010}", + }, + "start": None, + "responded": False, + "path": [ + { + "node": color_prompt["uuid"], + "time": format_datetime(iso8601.parse_date(frank_run2.path[0]["arrived_on"])), + }, + { + "node": color_split["uuid"], + "time": format_datetime(iso8601.parse_date(frank_run2.path[1]["arrived_on"])), + }, + ], + "values": {}, + "created_on": format_datetime(frank_run2.created_on), + "modified_on": format_datetime(frank_run2.modified_on), + "exited_on": None, + "exit_type": None, + }, + response.json()["results"][0], + ) + + # filter by uuid + self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}", [self.admin], results=[frank_run2]) + + # filter by id and uuid + self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}&id={joe_run1.id}", [self.admin], results=[]) + self.assertGet(endpoint_url + f"?uuid={frank_run2.uuid}&id={frank_run2.id}", [self.admin], results=[frank_run2]) + + # filter by flow + self.assertGet( + endpoint_url + f"?flow={flow1.uuid}", [self.admin], results=[joe_run2, frank_run2, frank_run1, joe_run1] + ) + + # doesn't work if flow is inactive + flow1.is_active = False + flow1.save() + + self.assertGet(endpoint_url + f"?flow={flow1.uuid}", [self.admin], results=[]) + + # restore to active + flow1.is_active = True + flow1.save() + + # filter by invalid flow + self.assertGet(endpoint_url + "?flow=invalid", [self.admin], results=[]) + + # filter by flow + responded + self.assertGet( + endpoint_url + f"?flow={flow1.uuid}&responded=TrUe", [self.admin], results=[frank_run1, joe_run1] + ) + + # filter by contact + self.assertGet(endpoint_url + f"?contact={joe.uuid}", [self.admin], results=[joe_run3, joe_run2, joe_run1]) + + # filter by invalid contact + self.assertGet(endpoint_url + "?contact=invalid", [self.admin], results=[]) + + # filter by contact + responded + self.assertGet(endpoint_url + f"?contact={joe.uuid}&responded=yes", [self.admin], results=[joe_run1]) + + # filter by before / after + self.assertGet( + endpoint_url + f"?before={format_datetime(frank_run1.modified_on)}", + [self.admin], + results=[frank_run1, joe_run1], + ) + self.assertGet( + endpoint_url + f"?after={format_datetime(frank_run1.modified_on)}", + [self.admin], + results=[joe_run3, joe_run2, frank_run2, frank_run1], + ) + + # filter by invalid before / after + self.assertGet(endpoint_url + "?before=longago", [self.admin], results=[]) + self.assertGet(endpoint_url + "?after=thefuture", [self.admin], results=[]) + + # can't filter by both contact and flow together + self.assertGet( + endpoint_url + f"?contact={joe.uuid}&flow={flow1.uuid}", + [self.admin], + errors={None: "You may only specify one of the contact, flow parameters"}, + ) diff --git a/temba/api/v2/tests/test_serializers.py b/temba/api/v2/tests/test_serializers.py new file mode 100644 index 00000000000..57adc17500a --- /dev/null +++ b/temba/api/v2/tests/test_serializers.py @@ -0,0 +1,343 @@ +from rest_framework import serializers + +from django.conf import settings + +from temba.api.v2 import fields +from temba.campaigns.models import Campaign, CampaignEvent +from temba.contacts.models import Contact, ContactField, ContactURN + +from . 
import APITest + + +class FieldsTest(APITest): + def assert_field(self, f, *, submissions: dict, representations: dict): + f._context = {"org": self.org} # noqa + + for submitted, expected in submissions.items(): + if isinstance(expected, type) and issubclass(expected, Exception): + with self.assertRaises(expected, msg=f"expected exception for '{submitted}'"): + f.run_validation(submitted) + else: + self.assertEqual(f.run_validation(submitted), expected, f"to_internal_value mismatch for '{submitted}'") + + for value, expected in representations.items(): + self.assertEqual(f.to_representation(value), expected, f"to_representation mismatch for '{value}'") + + def test_contact(self): + joe = self.create_contact("Joe", urns=["tel:+593999123456"]) + frank = self.create_contact("Frank", urns=["twitterid:2352463463#franky"]) # urn has display fragment + voldemort = self.create_contact("", urns=[]) # no name or URNs + + self.assert_field( + fields.ContactField(source="test"), + submissions={ + joe.uuid: joe, # by UUID + joe.get_urn().urn: joe, # by URN + 0: serializers.ValidationError, + (joe.uuid, frank.uuid): serializers.ValidationError, + }, + representations={ + joe: {"uuid": str(joe.uuid), "name": "Joe"}, + }, + ) + + self.assert_field( + fields.ContactField(source="test", as_summary=True), + submissions={ + joe.uuid: joe, # by UUID + joe.get_urn().urn: joe, # by URN + 0: serializers.ValidationError, + (joe.uuid, frank.uuid): serializers.ValidationError, + }, + representations={ + joe: { + "uuid": str(joe.uuid), + "name": "Joe", + "urn": "tel:+593999123456", + "urn_display": "099 912 3456", + }, + frank: { + "uuid": str(frank.uuid), + "name": "Frank", + "urn": "twitterid:2352463463", + "urn_display": "franky", + }, + voldemort: { + "uuid": str(voldemort.uuid), + "name": "", + "urn": None, + "urn_display": None, + }, + }, + ) + + self.assert_field( + fields.ContactField(source="test", many=True), + submissions={ + (joe.uuid, frank.uuid): [joe, frank], + joe.uuid: serializers.ValidationError, + }, + representations={ + (joe, frank): [ + {"uuid": str(joe.uuid), "name": "Joe"}, + {"uuid": str(frank.uuid), "name": "Frank"}, + ] + }, + ) + + with self.anonymous(self.org): + # load contacts again without cached org on them or their urns + joe = Contact.objects.get(id=joe.id) + frank = Contact.objects.get(id=frank.id) + voldemort = Contact.objects.get(id=voldemort.id) + + self.assert_field( + fields.ContactField(source="test"), + submissions={ + joe.uuid: joe, # by UUID + joe.get_urn().urn: joe, # by URN + 0: serializers.ValidationError, + (joe.uuid, frank.uuid): serializers.ValidationError, + }, + representations={ + joe: {"uuid": str(joe.uuid), "name": "Joe"}, + frank: {"uuid": str(frank.uuid), "name": "Frank"}, + voldemort: {"uuid": str(voldemort.uuid), "name": ""}, + }, + ) + + self.assert_field( + fields.ContactField(source="test", as_summary=True), + submissions={ + joe.uuid: joe, # by UUID + joe.get_urn().urn: joe, # by URN + 0: serializers.ValidationError, + (joe.uuid, frank.uuid): serializers.ValidationError, + }, + representations={ + joe: { + "uuid": str(joe.uuid), + "name": "Joe", + "urn": "tel:********", + "urn_display": None, + "anon_display": f"{joe.id:010}", + }, + frank: { + "uuid": str(frank.uuid), + "name": "Frank", + "urn": "twitterid:********", + "urn_display": None, + "anon_display": f"{frank.id:010}", + }, + voldemort: { + "uuid": str(voldemort.uuid), + "name": "", + "urn": None, + "urn_display": None, + "anon_display": f"{voldemort.id:010}", + }, + }, + ) + + def 
test_language_and_translations(self): + self.assert_field( + fields.LanguageField(source="test"), + submissions={ + "eng": "eng", + "kin": "kin", + 123: serializers.ValidationError, + "base": serializers.ValidationError, + }, + representations={"eng": "eng"}, + ) + + field = fields.LimitedDictField(source="test", max_length=2) + self.assertEqual({"foo": "bar", "zed": 123}, field.run_validation({"foo": "bar", "zed": 123})) + self.assertRaises(serializers.ValidationError, field.run_validation, {"1": 1, "2": 2, "3": 3}) + + field = fields.LanguageDictField(source="test") + self.assertEqual(field.run_validation({"eng": "Hello"}), {"eng": "Hello"}) + self.assertRaises(serializers.ValidationError, field.run_validation, {"base": ""}) + + field = fields.TranslatedTextField(source="test", max_length=10) + field._context = {"org": self.org} + + self.assertEqual(field.run_validation("Hello"), {"eng": "Hello"}) + self.assertEqual(field.run_validation({"eng": "Hello"}), {"eng": "Hello"}) + self.assertEqual(field.run_validation({"eng": "Hello", "spa": "Hola"}), {"eng": "Hello", "spa": "Hola"}) + self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": ""}) # empty + self.assertRaises(serializers.ValidationError, field.run_validation, "") # empty + self.assertRaises(serializers.ValidationError, field.run_validation, " ") # blank + self.assertRaises(serializers.ValidationError, field.run_validation, 123) # not a string or dict + self.assertRaises(serializers.ValidationError, field.run_validation, {}) # no translations + self.assertRaises(serializers.ValidationError, field.run_validation, {123: "Hello"}) # lang not a str + self.assertRaises(serializers.ValidationError, field.run_validation, {"base": "Hello"}) # lang not valid + self.assertRaises(serializers.ValidationError, field.run_validation, "HelloHello1") # translation too long + self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": "HelloHello1"}) + + media1 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") + media2 = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/snow.mp4") + + field = fields.TranslatedAttachmentsField(source="test") + field._context = {"org": self.org} + + self.assertEqual(field.run_validation([f"image/jpeg:{media1.url}"]), {"eng": [media1]}) + self.assertEqual(field.run_validation({"eng": [str(media1.uuid)]}), {"eng": [media1]}) + self.assertEqual( + field.run_validation({"eng": [str(media1.uuid), str(media2.uuid)], "spa": [str(media1.uuid)]}), + {"eng": [media1, media2], "spa": [media1]}, + ) + self.assertRaises(serializers.ValidationError, field.run_validation, {}) # empty + self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": [""]}) # empty + self.assertRaises(serializers.ValidationError, field.run_validation, {"eng": [" "]}) # blank + self.assertRaises(serializers.ValidationError, field.run_validation, {"base": ["Hello"]}) # lang not valid + self.assertRaises( + serializers.ValidationError, field.run_validation, {"eng": ["Hello"]} + ) # translation not valid attachment + self.assertRaises( + serializers.ValidationError, field.run_validation, {"kin": f"image/jpeg:{media1.url}"} + ) # translation not a list + self.assertRaises( + serializers.ValidationError, field.run_validation, {"eng": [f"image/jpeg:{media1.url}"] * 11} + ) # too many + + # check that default language is based on first flow language + self.org.flow_languages = ["spa", "kin"] + self.org.save(update_fields=("flow_languages",)) + + 
self.assertEqual(field.to_internal_value([str(media1.uuid)]), {"spa": [media1]}) + + def test_others(self): + group = self.create_group("Customers") + field_obj = self.create_field("registered", "Registered On", value_type=ContactField.TYPE_DATETIME) + flow = self.create_flow("Test") + campaign = Campaign.create(self.org, self.admin, "Reminders #1", group) + event = CampaignEvent.create_flow_event( + self.org, self.admin, campaign, field_obj, 6, CampaignEvent.UNIT_HOURS, flow, delivery_hour=12 + ) + media = self.upload_media(self.admin, f"{settings.MEDIA_ROOT}/test_media/steve marten.jpg") + + field = fields.CampaignField(source="test") + field._context = {"org": self.org} + + self.assertEqual(field.to_internal_value(str(campaign.uuid)), campaign) + self.assertRaises(serializers.ValidationError, field.to_internal_value, {"id": 3}) # not a string or int + + field = fields.CampaignEventField(source="test") + field._context = {"org": self.org} + + self.assertEqual(field.to_internal_value(str(event.uuid)), event) + + field._context = {"org": self.org2} + + self.assertRaises(serializers.ValidationError, field.to_internal_value, event.uuid) + + deleted_channel = self.create_channel("A", "My Android", "123456") + deleted_channel.is_active = False + deleted_channel.save(update_fields=("is_active",)) + + self.assert_field( + fields.ChannelField(source="test"), + submissions={self.channel.uuid: self.channel, deleted_channel.uuid: serializers.ValidationError}, + representations={self.channel: {"uuid": str(self.channel.uuid), "name": "Test Channel"}}, + ) + + self.assert_field( + fields.ContactGroupField(source="test"), + submissions={group.uuid: group}, + representations={group: {"uuid": str(group.uuid), "name": "Customers"}}, + ) + + field_created_on = self.org.fields.get(key="created_on") + + self.assert_field( + fields.ContactFieldField(source="test"), + submissions={"registered": field_obj, "created_on": field_created_on, "xyz": serializers.ValidationError}, + representations={field_obj: {"key": "registered", "name": "Registered On", "label": "Registered On"}}, + ) + + self.assert_field( + fields.FlowField(source="test"), + submissions={flow.uuid: flow}, + representations={flow: {"uuid": str(flow.uuid), "name": flow.name}}, + ) + + self.assert_field( + fields.MediaField(source="test"), + submissions={str(media.uuid): media, "xyz": serializers.ValidationError}, + representations={media: str(media.uuid)}, + ) + + self.assert_field( + fields.TopicField(source="test"), + submissions={str(self.org.default_ticket_topic.uuid): self.org.default_ticket_topic}, + representations={ + self.org.default_ticket_topic: {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"} + }, + ) + + self.assert_field( + fields.URNField(source="test"), + submissions={ + "tel:+1-800-123-4567": "tel:+18001234567", + "tel:0788 123 123": "tel:+250788123123", # using org country + "tel:(078) 812-3123": "tel:+250788123123", + "12345": serializers.ValidationError, # un-parseable + "tel:800-123-4567": serializers.ValidationError, # no country code + f"external:{'1' * 256}": serializers.ValidationError, # too long + 18_001_234_567: serializers.ValidationError, # non-string + }, + representations={"tel:+18001234567": "tel:+18001234567"}, + ) + + self.editor.is_active = False + self.editor.save(update_fields=("is_active",)) + + self.assert_field( + fields.UserField(source="test"), + submissions={ + "VIEWER@TEXTIT.COM": self.user, + "admin@textit.com": self.admin, + self.editor.email: serializers.ValidationError, # deleted + 
self.admin2.email: serializers.ValidationError, # not in org + }, + representations={ + self.user: {"email": "viewer@textit.com", "name": ""}, + self.editor: {"email": "editor@textit.com", "name": "Ed McEdits"}, + }, + ) + self.assert_field( + fields.UserField(source="test", assignable_only=True), + submissions={ + self.user.email: serializers.ValidationError, # not assignable + self.admin.email: self.admin, + self.agent.email: self.agent, + }, + representations={self.agent: {"email": "agent@textit.com", "name": "Agnes"}}, + ) + + def test_serialize_urn(self): + urn_obj = ContactURN.objects.create( + org=self.org, scheme="tel", path="+250788383383", identity="tel:+250788383383", priority=50, display="xyz" + ) + urn_dict = { + "channel": {"name": "Twilio", "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8"}, + "scheme": "tel", + "path": "+250788383383", + "display": "xyz", + } + + self.assertEqual("tel:+250788383383", fields.serialize_urn(self.org, urn_obj)) + self.assertEqual(urn_dict, fields.serialize_urn(self.org, urn_dict)) + + with self.anonymous(self.org): + self.assertEqual("tel:********", fields.serialize_urn(self.org, urn_obj)) + self.assertEqual( + { + "channel": {"name": "Twilio", "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8"}, + "scheme": "tel", + "path": "********", + "display": "xyz", + }, + fields.serialize_urn(self.org, urn_dict), + ) diff --git a/temba/api/v2/tests/test_ticket_actions.py b/temba/api/v2/tests/test_ticket_actions.py new file mode 100644 index 00000000000..4c14ece2a92 --- /dev/null +++ b/temba/api/v2/tests/test_ticket_actions.py @@ -0,0 +1,151 @@ +from datetime import datetime, timezone as tzone + +from django.urls import reverse + +from temba.tests import mock_mailroom +from temba.tickets.models import Topic + +from . import APITest + + +class TicketActionsEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.ticket_actions") + ".json" + + self.assertGetNotAllowed(endpoint_url) + self.assertPostNotPermitted(endpoint_url, [None, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + # create some tickets + joe = self.create_contact("Joe Blow", phone="+250788123123") + frank = self.create_contact("Frank", urns=["facebook:123456"]) + sales = Topic.create(self.org, self.admin, "Sales") + ticket1 = self.create_ticket(joe, closed_on=datetime(2021, 1, 1, 12, 30, 45, 123456, tzone.utc)) + ticket2 = self.create_ticket(joe) + self.create_ticket(frank) + + # on another org + ticket4 = self.create_ticket(self.create_contact("Jim", urns=["twitter:jimmy"], org=self.org2)) + + # try actioning more tickets than this endpoint is allowed to operate on at one time + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(x) for x in range(101)], "action": "close"}, + errors={"tickets": "Ensure this field has no more than 100 elements."}, + ) + + # try actioning a ticket which is not in this org + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket4.uuid)], "action": "close"}, + errors={"tickets": f"No such object: {ticket4.uuid}"}, + ) + + # try to close tickets without specifying any tickets + self.assertPost( + endpoint_url, + self.agent, + {"action": "close"}, + errors={"tickets": "This field is required."}, + ) + + # try to assign ticket without specifying assignee + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid)], "action": "assign"}, + errors={"non_field_errors": 'For action "assign" you must specify the assignee'}, + ) + + # try to 
add a note without specifying note + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid)], "action": "add_note"}, + errors={"non_field_errors": 'For action "add_note" you must specify the note'}, + ) + + # try to change topic without specifying topic + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid)], "action": "change_topic"}, + errors={"non_field_errors": 'For action "change_topic" you must specify the topic'}, + ) + + # assign valid tickets to a user + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "assign", "assignee": "agent@textit.com"}, + status=204, + ) + + ticket1.refresh_from_db() + ticket2.refresh_from_db() + self.assertEqual(self.agent, ticket1.assignee) + self.assertEqual(self.agent, ticket2.assignee) + + # unassign tickets + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid)], "action": "assign", "assignee": None}, + status=204, + ) + + ticket1.refresh_from_db() + self.assertIsNone(ticket1.assignee) + + # add a note to tickets + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "add_note", "note": "Looks important"}, + status=204, + ) + + self.assertEqual("Looks important", ticket1.events.last().note) + self.assertEqual("Looks important", ticket2.events.last().note) + + # change topic of tickets + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "change_topic", "topic": str(sales.uuid)}, + status=204, + ) + + ticket1.refresh_from_db() + ticket2.refresh_from_db() + self.assertEqual(sales, ticket1.topic) + self.assertEqual(sales, ticket2.topic) + + # close tickets + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "close"}, + status=204, + ) + + ticket1.refresh_from_db() + ticket2.refresh_from_db() + self.assertEqual("C", ticket1.status) + self.assertEqual("C", ticket2.status) + + # and finally reopen them + self.assertPost( + endpoint_url, + self.agent, + {"tickets": [str(ticket1.uuid), str(ticket2.uuid)], "action": "reopen"}, + status=204, + ) + + ticket1.refresh_from_db() + ticket2.refresh_from_db() + self.assertEqual("O", ticket1.status) + self.assertEqual("O", ticket2.status) diff --git a/temba/api/v2/tests/test_tickets.py b/temba/api/v2/tests/test_tickets.py new file mode 100644 index 00000000000..51511b19542 --- /dev/null +++ b/temba/api/v2/tests/test_tickets.py @@ -0,0 +1,89 @@ +from datetime import datetime, timezone as tzone + +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.tests import mock_mailroom + +from . 
import APITest + + +class TicketsEndpointTest(APITest): + @mock_mailroom + def test_endpoint(self, mr_mocks): + endpoint_url = reverse("api.v2.tickets") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + # create some tickets + ann = self.create_contact("Ann", urns=["twitter:annie"]) + bob = self.create_contact("Bob", urns=["twitter:bobby"]) + flow = self.create_flow("Support") + + ticket1 = self.create_ticket( + ann, opened_by=self.admin, closed_on=datetime(2021, 1, 1, 12, 30, 45, 123456, tzone.utc) + ) + ticket2 = self.create_ticket(bob, opened_in=flow) + ticket3 = self.create_ticket(bob, assignee=self.agent) + + # on another org + self.create_ticket(self.create_contact("Jim", urns=["twitter:jimmy"], org=self.org2)) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor, self.admin, self.agent], + results=[ + { + "uuid": str(ticket3.uuid), + "assignee": {"email": "agent@textit.com", "name": "Agnes"}, + "contact": {"uuid": str(bob.uuid), "name": "Bob"}, + "status": "open", + "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"}, + "body": None, + "opened_on": format_datetime(ticket3.opened_on), + "opened_by": None, + "opened_in": None, + "modified_on": format_datetime(ticket3.modified_on), + "closed_on": None, + }, + { + "uuid": str(ticket2.uuid), + "assignee": None, + "contact": {"uuid": str(bob.uuid), "name": "Bob"}, + "status": "open", + "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"}, + "body": None, + "opened_on": format_datetime(ticket2.opened_on), + "opened_by": None, + "opened_in": {"uuid": str(flow.uuid), "name": "Support"}, + "modified_on": format_datetime(ticket2.modified_on), + "closed_on": None, + }, + { + "uuid": str(ticket1.uuid), + "assignee": None, + "contact": {"uuid": str(ann.uuid), "name": "Ann"}, + "status": "closed", + "topic": {"uuid": str(self.org.default_ticket_topic.uuid), "name": "General"}, + "body": None, + "opened_on": format_datetime(ticket1.opened_on), + "opened_by": {"email": "admin@textit.com", "name": "Andy"}, + "opened_in": None, + "modified_on": format_datetime(ticket1.modified_on), + "closed_on": "2021-01-01T12:30:45.123456Z", + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 6, + ) + + # filter by contact uuid (not there) + self.assertGet(endpoint_url + "?contact=09d23a05-47fe-11e4-bfe9-b8f6b119e9ab", [self.admin], results=[]) + + # filter by contact uuid present + self.assertGet(endpoint_url + f"?contact={bob.uuid}", [self.admin], results=[ticket3, ticket2]) + + # filter further by ticket uuid + self.assertGet(endpoint_url + f"?uuid={ticket3.uuid}", [self.admin], results=[ticket3]) diff --git a/temba/api/v2/tests/test_topics.py b/temba/api/v2/tests/test_topics.py new file mode 100644 index 00000000000..2d32140a2b8 --- /dev/null +++ b/temba/api/v2/tests/test_topics.py @@ -0,0 +1,127 @@ +from django.test import override_settings +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime +from temba.tests import matchers +from temba.tickets.models import Topic + +from . 
import APITest + + +class TopicsEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.topics") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotPermitted(endpoint_url, [None, self.agent, self.user]) + self.assertDeleteNotAllowed(endpoint_url) + + # create some topics + support = Topic.create(self.org, self.admin, "Support") + sales = Topic.create(self.org, self.admin, "Sales") + other_org = Topic.create(self.org2, self.admin, "Bugs") + + contact = self.create_contact("Ann", phone="+1234567890") + self.create_ticket(contact, topic=support) + + # no filtering + self.assertGet( + endpoint_url, + [self.user, self.editor], + results=[ + { + "uuid": str(sales.uuid), + "name": "Sales", + "counts": {"open": 0, "closed": 0}, + "system": False, + "created_on": format_datetime(sales.created_on), + }, + { + "uuid": str(support.uuid), + "name": "Support", + "counts": {"open": 1, "closed": 0}, + "system": False, + "created_on": format_datetime(support.created_on), + }, + { + "uuid": str(self.org.default_ticket_topic.uuid), + "name": "General", + "counts": {"open": 0, "closed": 0}, + "system": True, + "created_on": format_datetime(self.org.default_ticket_topic.created_on), + }, + ], + num_queries=self.BASE_SESSION_QUERIES + 3, + ) + + # try to create empty topic + response = self.assertPost(endpoint_url, self.editor, {}, errors={"name": "This field is required."}) + + # create new topic + response = self.assertPost(endpoint_url, self.editor, {"name": "Food"}, status=201) + + food = Topic.objects.get(name="Food") + self.assertEqual( + response.json(), + { + "uuid": str(food.uuid), + "name": "Food", + "counts": {"open": 0, "closed": 0}, + "system": False, + "created_on": matchers.ISODate(), + }, + ) + + # try to create another topic with same name + self.assertPost(endpoint_url, self.editor, {"name": "Food"}, errors={"name": "This field must be unique."}) + + # it's fine if a topic in another org has that name + self.assertPost(endpoint_url, self.editor, {"name": "Bugs"}, status=201) + + # try to create a topic with invalid name + self.assertPost(endpoint_url, self.editor, {"name": '"Hi"'}, errors={"name": 'Cannot contain the character: "'}) + + # try to create a topic with name that's too long + self.assertPost( + endpoint_url, + self.editor, + {"name": "x" * 65}, + errors={"name": "Ensure this field has no more than 64 characters."}, + ) + + # update topic by UUID + self.assertPost(endpoint_url + f"?uuid={support.uuid}", self.admin, {"name": "Support Tickets"}) + + support.refresh_from_db() + self.assertEqual(support.name, "Support Tickets") + + # can't update default topic for an org + self.assertPost( + endpoint_url + f"?uuid={self.org.default_ticket_topic.uuid}", + self.admin, + {"name": "Won't work"}, + errors={None: "Cannot modify system object."}, + status=403, + ) + + # can't update topic from other org + self.assertPost(endpoint_url + f"?uuid={other_org.uuid}", self.admin, {"name": "Won't work"}, status=404) + + # can't update topic to same name as existing topic + self.assertPost( + endpoint_url + f"?uuid={support.uuid}", + self.admin, + {"name": "General"}, + errors={"name": "This field must be unique."}, + ) + + # try creating a new topic after reaching the limit + current_count = self.org.topics.filter(is_system=False, is_active=True).count() + with override_settings(ORG_LIMIT_DEFAULTS={"topics": current_count}): + response = self.assertPost( + endpoint_url, + self.admin, + {"name": "Interesting"}, + errors={None: "Cannot create 
object because workspace has reached limit of 4."}, + status=409, + ) diff --git a/temba/api/v2/tests/test_users.py b/temba/api/v2/tests/test_users.py new file mode 100644 index 00000000000..c448b946ee9 --- /dev/null +++ b/temba/api/v2/tests/test_users.py @@ -0,0 +1,73 @@ +from django.urls import reverse + +from temba.api.v2.serializers import format_datetime + +from . import APITest + + +class UsersEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.users") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + self.assertGet( + endpoint_url, + [self.agent, self.user, self.editor, self.admin], + results=[ + { + "email": "agent@textit.com", + "first_name": "Agnes", + "last_name": "", + "role": "agent", + "team": {"uuid": str(self.org.default_ticket_team.uuid), "name": "All Topics"}, + "created_on": format_datetime(self.agent.date_joined), + "avatar": None, + }, + { + "email": "viewer@textit.com", + "first_name": "", + "last_name": "", + "role": "viewer", + "team": None, + "created_on": format_datetime(self.user.date_joined), + "avatar": None, + }, + { + "email": "editor@textit.com", + "first_name": "Ed", + "last_name": "McEdits", + "role": "editor", + "team": None, + "created_on": format_datetime(self.editor.date_joined), + "avatar": None, + }, + { + "email": "admin@textit.com", + "first_name": "Andy", + "last_name": "", + "role": "administrator", + "team": None, + "created_on": format_datetime(self.admin.date_joined), + "avatar": None, + }, + ], + # one query per user for their settings + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by email + self.assertGet( + f"{endpoint_url}?email=agent@textit.com&email=EDITOR@textit.com", + [self.agent], + results=[self.agent, self.editor], + num_queries=self.BASE_SESSION_QUERIES + 2, + ) + + # filter by roles + self.assertGet(endpoint_url + "?role=agent&role=editor", [self.editor], results=[self.agent, self.editor]) + + # non-existent roles ignored + self.assertGet(endpoint_url + "?role=caretaker&role=editor", [self.editor], results=[self.editor]) diff --git a/temba/api/v2/tests/test_workspace.py b/temba/api/v2/tests/test_workspace.py new file mode 100644 index 00000000000..68cffb00c74 --- /dev/null +++ b/temba/api/v2/tests/test_workspace.py @@ -0,0 +1,47 @@ +from django.urls import reverse + +from . import APITest + + +class WorkspaceEndpointTest(APITest): + def test_endpoint(self): + endpoint_url = reverse("api.v2.workspace") + ".json" + + self.assertGetNotPermitted(endpoint_url, [None]) + self.assertPostNotAllowed(endpoint_url) + self.assertDeleteNotAllowed(endpoint_url) + + # no filtering options.. 
just gets the current org + self.assertGet( + endpoint_url, + [self.agent, self.user, self.editor, self.admin], + raw={ + "uuid": str(self.org.uuid), + "name": "Nyaruka", + "country": "RW", + "languages": ["eng", "kin"], + "primary_language": "eng", + "timezone": "Africa/Kigali", + "date_style": "day_first", + "credits": {"used": -1, "remaining": -1}, + "anon": False, + }, + ) + + self.org.set_flow_languages(self.admin, ["kin"]) + + self.assertGet( + endpoint_url, + [self.agent], + raw={ + "uuid": str(self.org.uuid), + "name": "Nyaruka", + "country": "RW", + "languages": ["kin"], + "primary_language": "kin", + "timezone": "Africa/Kigali", + "date_style": "day_first", + "credits": {"used": -1, "remaining": -1}, + "anon": False, + }, + ) diff --git a/temba/api/v2/views.py b/temba/api/v2/views.py index ff3f0002dab..f0d268d2674 100644 --- a/temba/api/v2/views.py +++ b/temba/api/v2/views.py @@ -9,22 +9,23 @@ from rest_framework.reverse import reverse from smartmin.views import SmartTemplateView -from django.db.models import Count, Prefetch, Q +from django.db.models import OuterRef, Prefetch, Q from django.utils.translation import gettext_lazy as _ from temba.archives.models import Archive from temba.campaigns.models import Campaign, CampaignEvent from temba.channels.models import Channel, ChannelEvent from temba.classifiers.models import Classifier -from temba.contacts.models import Contact, ContactField, ContactGroup, ContactGroupCount, ContactNote, ContactURN -from temba.flows.models import Flow, FlowRun, FlowStart +from temba.contacts.models import Contact, ContactField, ContactGroup, ContactNote, ContactURN +from temba.flows.models import Flow, FlowRun, FlowStart, FlowStartCount from temba.globals.models import Global from temba.locations.models import AdminBoundary, BoundaryAlias -from temba.msgs.models import Broadcast, Label, LabelCount, Media, Msg, OptIn, SystemLabel +from temba.msgs.models import Broadcast, BroadcastMsgCount, Label, LabelCount, Media, Msg, OptIn, SystemLabel from temba.orgs.models import OrgMembership, User -from temba.orgs.views import OrgPermsMixin -from temba.tickets.models import Ticket, TicketCount, Topic +from temba.orgs.views.mixins import OrgPermsMixin +from temba.tickets.models import Ticket, Topic from temba.utils import str_to_bool +from temba.utils.db.queries import SubqueryCount, or_list from temba.utils.uuid import is_uuid from ..models import APIPermission, Resthook, ResthookSubscriber, SSLPermission, WebHookEvent @@ -100,10 +101,6 @@ class ExplorerView(OrgPermsMixin, SmartTemplateView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) - org = self.request.org - user = self.request.user - - context["api_token"] = user.get_api_tokens(org).order_by("created").last() context["endpoints"] = [ ArchivesEndpoint.get_read_explorer(), BoundariesEndpoint.get_read_explorer(), @@ -325,12 +322,12 @@ class ArchivesEndpoint(ListAPIMixin, BaseEndpoint): A `GET` returns the archives for your organization with the following fields. - * **archive_type** - the type of the archive, one of `message`or `run` (filterable as `archive_type`). + * **archive_type** - the type of the archive, one of `message` or `run` (filterable as `archive_type`). * **start_date** - the UTC date of the archive (string) (filterable as `before` and `after`). * **period** - `daily` for daily archives, `monthly` for monthly archives (filterable as `period`). * **record_count** - number of records in the archive (int). 
- * **size** - size of the gziped archive content (int). - * **hash** - MD5 hash of the gziped archive (string). + * **size** - size of the gzipped archive content (int). + * **hash** - MD5 hash of the gzipped archive (string). * **download_url** - temporary download URL of the archive (string). Example: @@ -345,13 +342,13 @@ class ArchivesEndpoint(ListAPIMixin, BaseEndpoint): "count": 248, "results": [ { - "archive_type":"message", - "start_date":"2017-02-20", - "period":"daily", - "record_count":1432, - "size":2304, - "hash":"feca9988b7772c003204a28bd741d0d0", - "download_url":"" + "archive_type": "message", + "start_date": "2017-02-20", + "period": "daily", + "record_count": 1432, + "size": 2304, + "hash": "feca9988b7772c003204a28bd741d0d0", + "download_url": "https://..." }, ... } @@ -504,7 +501,7 @@ class BroadcastsEndpoint(ListAPIMixin, WriteAPIMixin, BaseEndpoint): * **text** - the message text translations (dict of strings). * **attachments** - the attachment translations (dict of lists of strings). * **base_language** - the default translation language (string). - * **status** - the status of the message, one of `queued`, `sent`, `failed`. + * **status** - the status, one of `pending`, `queued`, `started`, `completed`, `failed`, `interrupted`. * **created_on** - when this broadcast was either created (datetime) (filterable as `before` and `after`). Example: @@ -586,6 +583,9 @@ def filter_queryset(self, queryset): return self.filter_before_after(queryset, "created_on") + def prepare_for_serialization(self, object_list, using: str): + BroadcastMsgCount.bulk_annotate(object_list) + @classmethod def get_read_explorer(cls): return { @@ -1172,8 +1172,7 @@ class ContactsEndpoint(ListAPIMixin, WriteAPIMixin, DeleteAPIMixin, BaseEndpoint ## Listing Contacts - A **GET** returns the list of contacts for your organization, in the order of last activity date. You can return - only deleted contacts by passing the `deleted=true` parameter to your call. + A **GET** returns the list of contacts for your organization, in the order of last modified. * **uuid** - the UUID of the contact (string), filterable as `uuid`. * **name** - the name of the contact (string). 
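The new `prepare_for_serialization` hook on the broadcasts endpoint above lets list endpoints batch-fetch expensive per-object data before the serializer runs, replacing one count query per broadcast with a single aggregate. The body of `BroadcastMsgCount.bulk_annotate` is not shown in this diff; the sketch below is only an illustration of the usual shape of such a helper, assuming a count-row model with `broadcast` and `count` fields and a hypothetical `msg_count` attribute that the serializer reads:

    from django.db.models import Sum

    def bulk_annotate_msg_counts(broadcasts):
        # hypothetical sketch: one aggregate query for the whole page of
        # broadcasts instead of a separate COUNT per row during serialization
        totals = (
            BroadcastMsgCount.objects.filter(broadcast__in=broadcasts)
            .values("broadcast_id")
            .annotate(total=Sum("count"))
        )
        by_id = {row["broadcast_id"]: row["total"] for row in totals}
        for broadcast in broadcasts:
            broadcast.msg_count = by_id.get(broadcast.id, 0)

Keeping the query count constant regardless of page size is exactly what the `num_queries=self.BASE_SESSION_QUERIES + N` assertions in the new tests lock in.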
@@ -1503,6 +1502,9 @@ class Depends(Enum): all = 2 def get(self, request, *args, **kwargs): + if self.is_docs(): + return Response({}) + org = request.org params = request.query_params flow_uuids = params.getlist("flow") @@ -1631,9 +1633,17 @@ def derive_queryset(self): org = self.request.org return ( self.model.objects.filter(org=org, is_active=True, is_proxy=False) - .annotate(flow_count=Count("dependent_flows", filter=Q(dependent_flows__is_active=True))) - .annotate(group_count=Count("dependent_groups", filter=Q(dependent_groups__is_active=True))) - .annotate(campaignevent_count=Count("campaign_events", filter=Q(campaign_events__is_active=True))) + .annotate( + flow_count=SubqueryCount(Flow.objects.filter(field_dependencies__id=OuterRef("id"), is_active=True)) + ) + .annotate( + group_count=SubqueryCount(ContactGroup.objects.filter(query_fields__id=OuterRef("id"), is_active=True)) + ) + .annotate( + campaignevent_count=SubqueryCount( + CampaignEvent.objects.filter(relative_to__id=OuterRef("id"), is_active=True) + ) + ) ) def filter_queryset(self, queryset): @@ -1763,6 +1773,9 @@ def filter_queryset(self, queryset): return self.filter_before_after(queryset, "modified_on") + def prepare_for_serialization(self, object_list, using: str): + Flow.prefetch_run_stats(object_list, using=using) + @classmethod def get_read_explorer(cls): return { @@ -2042,7 +2055,7 @@ def filter_queryset(self, queryset): return queryset.filter(is_active=True).exclude(status=ContactGroup.STATUS_INITIALIZING) def prepare_for_serialization(self, object_list, using: str): - group_counts = ContactGroupCount.get_totals(object_list) + group_counts = ContactGroup.get_member_counts(object_list) for group in object_list: group.count = group_counts[group] @@ -2965,20 +2978,18 @@ class RunsEndpoint(ListAPIMixin, BaseEndpoint): "responded": true, "values": { "color": { + "name": "Color", "value": "blue", "category": "Blue", "node": "fc32aeb0-ac3e-42a8-9ea7-10248fdf52a1", - "time": "2015-11-11T13:03:51.635662Z", - "name": "color", - "input": "it is blue", + "time": "2015-11-11T13:03:51.635662Z" }, "reason": { + "name": "Reason", "value": "Because it's the color of sky", "category": "All Responses", "node": "4c9cb68d-474f-4b9a-b65e-c2aa593a3466", - "time": "2015-11-11T13:05:57.576056Z", - "name": "reason", - "input" "Because it's the color of sky", + "time": "2015-11-11T13:05:57.576056Z" } }, "created_on": "2015-11-11T13:05:57.457742Z", @@ -3094,13 +3105,11 @@ class FlowStartsEndpoint(ListAPIMixin, WriteAPIMixin, BaseEndpoint): By making a `GET` request you can list all the manual flow starts on your organization, in the order of last modified. Each flow start has the following attributes: - * **uuid** - the UUID of this flow start (string). + * **uuid** - the UUID of this flow start (string), filterable as `uuid`. * **flow** - the flow which was started (object). * **contacts** - the list of contacts that were started in the flow (objects). * **groups** - the list of groups that were started in the flow (objects). - * **restart_participants** - whether the contacts were restarted in this flow (boolean). - * **exclude_active** - whether the active contacts in other flows were excluded in this flow start (boolean). - * **status** - the status of this flow start. + * **status** - the status, one of `pending`, `queued`, `started`, `completed`, `failed`, `interrupted`. * **params** - the dictionary of extra parameters passed to the flow start (object). * **created_on** - the datetime when this flow start was created (datetime). 
* **modified_on** - the datetime when this flow start was modified (datetime). @@ -3172,7 +3181,7 @@ class FlowStartsEndpoint(ListAPIMixin, WriteAPIMixin, BaseEndpoint): "contacts": [ {"uuid": "f1ea776e-c923-4c1a-b3a3-0c466932b2cc", "name": "Wanz"} ], - "status": "complete", + "status": "pending", "params": { "first_name": "Ryan", "last_name": "Lewis" @@ -3189,14 +3198,9 @@ class FlowStartsEndpoint(ListAPIMixin, WriteAPIMixin, BaseEndpoint): pagination_class = ModifiedOnCursorPagination def filter_queryset(self, queryset): - # ignore flow starts created by mailroom + # ignore flow starts created by flows or triggers queryset = queryset.exclude(created_by=None) - # filter by id (optional and deprecated) - start_id = self.get_int_param("id") - if start_id: - queryset = queryset.filter(id=start_id) - # filter by UUID (optional) uuid = self.get_uuid_param("uuid") if uuid: @@ -3220,6 +3224,9 @@ def post_save(self, instance): # actually start our flow instance.async_start() + def prepare_for_serialization(self, object_list, using: str): + FlowStartCount.bulk_annotate(object_list) + @classmethod def get_read_explorer(cls): return { @@ -3379,7 +3386,7 @@ class TicketActionsEndpoint(BulkWriteAPIMixin, BaseEndpoint): { "tickets": ["55b6606d-9e89-45d1-a3e2-dc11f19f78df", "bef96b71-865d-480a-a660-33db466a210a"], "action": "assign", - "assignee": "jim@nyaruka.com" + "assignee": "jim@textit.com" } You will receive an empty response with status code 204 if successful. @@ -3461,8 +3468,8 @@ def get_queryset(self): return super().get_queryset().filter(is_active=True) def prepare_for_serialization(self, object_list, using: str): - open_counts = TicketCount.get_by_topics(self.request.org, object_list, Ticket.STATUS_OPEN) - closed_counts = TicketCount.get_by_topics(self.request.org, object_list, Ticket.STATUS_CLOSED) + open_counts = Ticket.get_topic_counts(self.request.org, object_list, Ticket.STATUS_OPEN) + closed_counts = Ticket.get_topic_counts(self.request.org, object_list, Ticket.STATUS_CLOSED) for topic in object_list: topic.open_count = open_counts[topic] topic.closed_count = closed_counts[topic] @@ -3491,10 +3498,11 @@ class UsersEndpoint(ListAPIMixin, BaseEndpoint): A **GET** returns the users in your workspace, ordered by newest created first. - * **email** - the email address of the user (string). + * **email** - the email address of the user (string), filterable as `email`. * **first_name** - the first name of the user (string). * **last_name** - the last name of the user (string). - * **role** - the role of the user (string), filterable as `role` which can be repeated. + * **role** - the role of the user (string), filterable as `role`. + * **team** - team user belongs to (object). * **created_on** - when this user was created (datetime). Example: @@ -3513,6 +3521,7 @@ class UsersEndpoint(ListAPIMixin, BaseEndpoint): "first_name": "Bob", "last_name": "McFlow", "role": "agent", + "team": {"uuid": "f5901b62-ba76-4003-9c62-72fdacc1b7b7", "name": "All Topics"}, "created_on": "2013-03-02T17:28:12.123456Z" }, ... 
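The `derive_queryset` change for the fields endpoint earlier in this file swaps joined `Count(...)` annotations for `SubqueryCount`, because annotating several reverse relations with `Count` in a single query multiplies rows across the joins and inflates the totals. The implementation of `temba.utils.db.queries.SubqueryCount` is not part of this diff; a common way to build it, shown here only as an assumed sketch, is a `Subquery` subclass that wraps the inner query in a COUNT so each annotation is evaluated independently per outer row:

    from django.db.models import IntegerField, Subquery

    class SubqueryCount(Subquery):
        # correlated COUNT(*): each annotation runs in its own subquery,
        # so multiple count annotations cannot inflate one another
        template = "(SELECT count(*) FROM (%(subquery)s) _count)"
        output_field = IntegerField()

It is then used with `OuterRef` exactly as in the hunk, e.g. `flow_count=SubqueryCount(Flow.objects.filter(field_dependencies__id=OuterRef("id"), is_active=True))`.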
@@ -3533,17 +3542,25 @@ def derive_queryset(self): else: roles = None - return org.get_users(roles=roles).prefetch_related("settings") + return org.get_users(roles=roles) + + def filter_queryset(self, queryset): + # filter by email if specified + emails = self.request.query_params.getlist("email") + if emails: + queryset = queryset.filter(or_list([Q(email__iexact=e) for e in emails])) + + return queryset def get_serializer_context(self): context = super().get_serializer_context() - # build a map of users to roles - user_roles = {} - for m in OrgMembership.objects.filter(org=self.request.org).select_related("user"): - user_roles[m.user] = m.role + # build a map of users to memberships + memberships = {} + for m in OrgMembership.objects.filter(org=self.request.org).select_related("user", "team"): + memberships[m.user] = m - context["user_roles"] = user_roles + context["memberships"] = memberships return context @classmethod @@ -3553,7 +3570,10 @@ def get_read_explorer(cls): "title": "List Users", "url": reverse("api.v2.users"), "slug": "user-list", - "params": [], + "params": [ + {"name": "email", "required": False, "help": "Only return users with this email"}, + {"name": "role", "required": False, "help": "Only return users with this role"}, + ], } @@ -3573,7 +3593,7 @@ class WorkspaceEndpoint(BaseEndpoint): { "uuid": "6a44ca78-a4c2-4862-a7d3-2932f9b3a7c3", - "name": "Nyaruka", + "name": "TextIt", "country": "RW", "languages": ["eng", "fra"], "timezone": "Africa/Kigali", diff --git a/temba/api/views.py b/temba/api/views.py index 411dfa9b58d..e5392eaaaa1 100644 --- a/temba/api/views.py +++ b/temba/api/views.py @@ -4,18 +4,19 @@ import iso8601 from rest_framework import generics, mixins, status from rest_framework.response import Response -from smartmin.views import SmartCRUDL, SmartDeleteView +from smartmin.views import SmartCRUDL from django.db import transaction from django.http import HttpResponseRedirect +from django.urls import reverse from django.utils.translation import gettext_lazy as _ from temba import mailroom from temba.api.support import InvalidQueryError from temba.contacts.models import URN -from temba.orgs.views import ModalMixin, OrgObjPermsMixin +from temba.orgs.views.base import BaseDeleteModal, BaseListView from temba.utils.models import TembaModel -from temba.utils.views import NonAtomicMixin +from temba.utils.views.mixins import ContextMenuMixin, NonAtomicMixin, SpaMixin from .models import APIToken, BulkActionFailure @@ -278,13 +279,38 @@ def perform_destroy(self, instance): class APITokenCRUDL(SmartCRUDL): model = APIToken - actions = ("delete",) + actions = ("list", "delete") - class Delete(ModalMixin, OrgObjPermsMixin, SmartDeleteView): + class List(SpaMixin, ContextMenuMixin, BaseListView): + title = _("API Tokens") + menu_path = "/settings/account" + paginate_by = None + token_limit = 3 + + def build_context_menu(self, menu): + if self.request.user.get_api_tokens(self.request.org).count() < self.token_limit: + menu.add_url_post(_("New"), reverse("api.apitoken_list"), as_button=True) + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + context["token_limit"] = self.token_limit + return context + + def get_queryset(self, **kwargs): + return self.request.user.get_api_tokens(self.request.org).order_by("created") + + def post(self, request, *args, **kwargs): + # there's no create view - just a POST to this view + if self.request.user.get_api_tokens(self.request.org).count() < self.token_limit: + APIToken.create(self.request.org, 
self.request.user) + + return HttpResponseRedirect(reverse("api.apitoken_list")) + + class Delete(BaseDeleteModal): slug_url_kwarg = "key" fields = ("key",) - cancel_url = "@orgs.user_tokens" - redirect_url = "@orgs.user_tokens" + cancel_url = "@api.apitoken_list" + redirect_url = "@api.apitoken_list" submit_button_name = _("Delete") def has_permission(self, request, *args, **kwargs): diff --git a/temba/apks/views.py b/temba/apks/views.py index 316c2b6a777..c04fb52d8b1 100644 --- a/temba/apks/views.py +++ b/temba/apks/views.py @@ -1,6 +1,6 @@ from smartmin.views import SmartCreateView, SmartCRUDL, SmartListView, SmartUpdateView -from temba.utils.views import StaffOnlyMixin +from temba.utils.views.mixins import StaffOnlyMixin from .models import Apk diff --git a/temba/archives/models.py b/temba/archives/models.py index 4c0f21711ae..71972bc14cf 100644 --- a/temba/archives/models.py +++ b/temba/archives/models.py @@ -14,7 +14,7 @@ from django.db.models import Q from django.utils import timezone -from temba.utils import json, s3, sizeof_fmt +from temba.utils import json, s3 from temba.utils.s3 import EventStreamReader KEY_PATTERN = re.compile(r"^(?P\d+)/(?Prun|message)_(?P(D|M)\d+)_(?P[0-9a-f]{32})\.jsonl\.gz$") @@ -35,33 +35,20 @@ class Archive(models.Model): archive_type = models.CharField(choices=TYPE_CHOICES, max_length=16) created_on = models.DateTimeField(default=timezone.now) - # the length of time this archive covers period = models.CharField(max_length=1, choices=PERIOD_CHOICES, default=PERIOD_DAILY) + start_date = models.DateField() # the earliest modified_on date for records (inclusive) + record_count = models.IntegerField(default=0) # number of records in this archive + size = models.BigIntegerField(default=0) # size in bytes of the archive contents (after compression) + hash = models.TextField() # MD5 hash of the archive contents (after compression) + url = models.URLField() # full URL of this archive + build_time = models.IntegerField() # time in ms it took to build and upload this archive - # the earliest modified_on date for records in this archive (inclusive) - start_date = models.DateField() - - # number of records in this archive - record_count = models.IntegerField(default=0) - - # size in bytes of the archive contents (after compression) - size = models.BigIntegerField(default=0) - - # MD5 hash of the archive contents (after compression) - hash = models.TextField() - - # full URL of this archive - url = models.URLField() + # archive we were rolled up into, if any + rollup = models.ForeignKey("archives.Archive", on_delete=models.PROTECT, null=True) # whether the records in this archive need to be deleted needs_deletion = models.BooleanField(default=False) - # number of milliseconds it took to build and upload this archive - build_time = models.IntegerField() - - # archive we were rolled up into, if any - rollup = models.ForeignKey("archives.Archive", on_delete=models.PROTECT, null=True) - # when this archive's records where deleted (if any) deleted_on = models.DateTimeField(null=True) @@ -69,9 +56,6 @@ class Archive(models.Model): def storage(cls): return storages["archives"] - def size_display(self): - return sizeof_fmt(self.size) - def get_storage_location(self) -> tuple: """ Returns a tuple of the storage bucket and key diff --git a/temba/archives/tests/__init__.py b/temba/archives/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/temba/archives/tests.py b/temba/archives/tests/test_archive.py similarity index 62% rename from 
temba/archives/tests.py rename to temba/archives/tests/test_archive.py index aa3eeee02c6..d3fe8e68b8a 100644 --- a/temba/archives/tests.py +++ b/temba/archives/tests/test_archive.py @@ -1,16 +1,10 @@ import base64 -import gzip -import hashlib -import io from datetime import date, datetime, timezone as tzone -from django.urls import reverse - -from temba.tests import CRUDLTestMixin, TembaTest +from temba.archives.models import Archive +from temba.tests import TembaTest from temba.utils import s3 -from .models import Archive, jsonlgz_rewrite - class ArchiveTest(TembaTest): def setUp(self): @@ -201,100 +195,3 @@ def purge_jim(record): self.assertEqual(hash_b64, self.s3_calls[-2][1]["ContentMD5"]) self.assertEqual("DeleteObject", self.s3_calls[-1][0]) self.assertEqual("test-archives", self.s3_calls[-1][1]["Bucket"]) - - -class ArchiveCRUDLTest(TembaTest, CRUDLTestMixin): - def test_empty_list(self): - response = self.assertListFetch(reverse("archives.archive_run"), [self.editor], context_objects=[]) - self.assertContains(response, "No archives found") - - response = self.assertListFetch(reverse("archives.archive_message"), [self.editor], context_objects=[]) - self.assertContains(response, "No archives found") - - def test_archive_type_filter(self): - # a daily archive that has been rolled up and will not appear in the results - d1 = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}, {"id": 2}]) - m1 = self.create_archive(Archive.TYPE_MSG, "M", date(2020, 7, 1), [{"id": 1}, {"id": 2}], rollup_of=(d1,)) - d2 = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 8, 1), [{"id": 3}, {"id": 4}]) - d3 = self.create_archive(Archive.TYPE_FLOWRUN, "D", date(2020, 8, 1), [{"id": 3}, {"id": 4}]) - - # create archive for other org - self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}], org=self.org2) - - runs_url = reverse("archives.archive_run") - msgs_url = reverse("archives.archive_message") - - self.assertRequestDisallowed(runs_url, [None, self.user, self.agent]) - self.assertRequestDisallowed(msgs_url, [None, self.user, self.agent]) - - response = self.assertListFetch(runs_url, [self.editor, self.admin], context_objects=[d3]) - self.assertContains(response, f"/archive/read/{d3.id}/") - - response = self.assertListFetch(msgs_url, [self.editor, self.admin], context_objects=[d2, m1]) - self.assertContains(response, f"/archive/read/{d2.id}/") - self.assertContains(response, f"/archive/read/{m1.id}/") - - def test_read(self): - archive = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}, {"id": 2}]) - - download_url = ( - f"http://minio:9000/test-archives/{self.org.id}/message_D20200731_{archive.hash}.jsonl.gz?response-con" - f"tent-disposition=attachment%3B&response-content-type=application%2Foctet&response-content-encoding=none" - ) - - self.assertRequestDisallowed(download_url, [None, self.user, self.agent, self.admin2]) - response = self.assertReadFetch( - reverse("archives.archive_read", args=[archive.id]), [self.editor, self.admin], status=302 - ) - - self.assertIn(download_url, response.get("Location")) - - -class JSONLGZTest(TembaTest): - def test_jsonlgz_rewrite(self): - def rewrite(b: bytes, transform): - in_file = io.BytesIO(b) - out_file = io.BytesIO() - md5, size = jsonlgz_rewrite(in_file, out_file, transform) - return out_file.getvalue(), md5.hexdigest(), size - - data = b'{"id": 123, "name": "Jim"}\n{"id": 234, "name": "Bob"}\n{"id": 345, "name": "Ann"}\n' - gzipped = gzip.compress(data) - - # rewrite that using a 
pass-through transform for each record - data1, hash1, size1 = rewrite(gzipped, lambda r: r) - - self.assertEqual(data, gzip.decompress(data1)) - self.assertEqual(hashlib.md5(data1).hexdigest(), hash1) - self.assertEqual(68, size1) - - # should get the exact same file and hash if we just repeat that - data2, hash2, size2 = rewrite(gzipped, lambda r: r) - - self.assertEqual(data1, data2) - self.assertEqual(hash1, hash2) - self.assertEqual(68, size2) - - # rewrite with a transform that modifies each record - def name_to_upper(record) -> dict: - record["name"] = record["name"].upper() - return record - - data3, hash3, size3 = rewrite(gzipped, name_to_upper) - - self.assertEqual( - b'{"id": 123, "name": "JIM"}\n{"id": 234, "name": "BOB"}\n{"id": 345, "name": "ANN"}\n', - gzip.decompress(data3), - ) - self.assertEqual(hashlib.md5(data3).hexdigest(), hash3) - self.assertEqual(68, size3) - - # rewrite with a transform that removes a record - def remove_bob(record) -> dict: - return None if record["id"] == 234 else record - - data4, hash4, size4 = rewrite(gzipped, remove_bob) - - self.assertEqual(b'{"id": 123, "name": "Jim"}\n{"id": 345, "name": "Ann"}\n', gzip.decompress(data4)) - self.assertEqual(hashlib.md5(data4).hexdigest(), hash4) - self.assertEqual(58, size4) diff --git a/temba/archives/tests/test_archivecrudl.py b/temba/archives/tests/test_archivecrudl.py new file mode 100644 index 00000000000..2e71299503e --- /dev/null +++ b/temba/archives/tests/test_archivecrudl.py @@ -0,0 +1,46 @@ +from datetime import date + +from django.urls import reverse + +from temba.archives.models import Archive +from temba.tests import CRUDLTestMixin, TembaTest + + +class ArchiveCRUDLTest(TembaTest, CRUDLTestMixin): + def test_list_views(self): + # a daily archive that has been rolled up and will not appear in the results + d1 = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}, {"id": 2}]) + m1 = self.create_archive(Archive.TYPE_MSG, "M", date(2020, 7, 1), [{"id": 1}, {"id": 2}], rollup_of=(d1,)) + d2 = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 8, 1), [{"id": 3}, {"id": 4}]) + d3 = self.create_archive(Archive.TYPE_FLOWRUN, "D", date(2020, 8, 1), [{"id": 3}, {"id": 4}]) + + # create archive for other org + self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}], org=self.org2) + + runs_url = reverse("archives.archive_run") + msgs_url = reverse("archives.archive_message") + + self.assertRequestDisallowed(runs_url, [None, self.user, self.agent]) + self.assertRequestDisallowed(msgs_url, [None, self.user, self.agent]) + + response = self.assertListFetch(runs_url, [self.editor, self.admin], context_objects=[d3]) + self.assertContains(response, f"/archive/read/{d3.id}/") + + response = self.assertListFetch(msgs_url, [self.editor, self.admin], context_objects=[d2, m1]) + self.assertContains(response, f"/archive/read/{d2.id}/") + self.assertContains(response, f"/archive/read/{m1.id}/") + + def test_read(self): + archive = self.create_archive(Archive.TYPE_MSG, "D", date(2020, 7, 31), [{"id": 1}, {"id": 2}]) + + download_url = ( + f"http://minio:9000/test-archives/{self.org.id}/message_D20200731_{archive.hash}.jsonl.gz?response-con" + f"tent-disposition=attachment%3B&response-content-type=application%2Foctet&response-content-encoding=none" + ) + + self.assertRequestDisallowed(download_url, [None, self.user, self.agent, self.admin2]) + response = self.assertReadFetch( + reverse("archives.archive_read", args=[archive.id]), [self.editor, self.admin], status=302 + ) + + 
self.assertIn(download_url, response.get("Location")) diff --git a/temba/archives/tests/test_misc.py b/temba/archives/tests/test_misc.py new file mode 100644 index 00000000000..c196671f8d6 --- /dev/null +++ b/temba/archives/tests/test_misc.py @@ -0,0 +1,56 @@ +import gzip +import hashlib +import io + +from temba.archives.models import jsonlgz_rewrite +from temba.tests import TembaTest + + +class JSONLGZTest(TembaTest): + def test_jsonlgz_rewrite(self): + def rewrite(b: bytes, transform): + in_file = io.BytesIO(b) + out_file = io.BytesIO() + md5, size = jsonlgz_rewrite(in_file, out_file, transform) + return out_file.getvalue(), md5.hexdigest(), size + + data = b'{"id": 123, "name": "Jim"}\n{"id": 234, "name": "Bob"}\n{"id": 345, "name": "Ann"}\n' + gzipped = gzip.compress(data) + + # rewrite that using a pass-through transform for each record + data1, hash1, size1 = rewrite(gzipped, lambda r: r) + + self.assertEqual(data, gzip.decompress(data1)) + self.assertEqual(hashlib.md5(data1).hexdigest(), hash1) + self.assertEqual(68, size1) + + # should get the exact same file and hash if we just repeat that + data2, hash2, size2 = rewrite(gzipped, lambda r: r) + + self.assertEqual(data1, data2) + self.assertEqual(hash1, hash2) + self.assertEqual(68, size2) + + # rewrite with a transform that modifies each record + def name_to_upper(record) -> dict: + record["name"] = record["name"].upper() + return record + + data3, hash3, size3 = rewrite(gzipped, name_to_upper) + + self.assertEqual( + b'{"id": 123, "name": "JIM"}\n{"id": 234, "name": "BOB"}\n{"id": 345, "name": "ANN"}\n', + gzip.decompress(data3), + ) + self.assertEqual(hashlib.md5(data3).hexdigest(), hash3) + self.assertEqual(68, size3) + + # rewrite with a transform that removes a record + def remove_bob(record) -> dict: + return None if record["id"] == 234 else record + + data4, hash4, size4 = rewrite(gzipped, remove_bob) + + self.assertEqual(b'{"id": 123, "name": "Jim"}\n{"id": 345, "name": "Ann"}\n', gzip.decompress(data4)) + self.assertEqual(hashlib.md5(data4).hexdigest(), hash4) + self.assertEqual(58, size4) diff --git a/temba/archives/views.py b/temba/archives/views.py index 99f2c246915..3aa34ea3956 100644 --- a/temba/archives/views.py +++ b/temba/archives/views.py @@ -1,11 +1,11 @@ from gettext import gettext as _ -from smartmin.views import SmartCRUDL, SmartListView, SmartReadView +from smartmin.views import SmartCRUDL from django.http import HttpResponseRedirect -from temba.orgs.views import OrgObjPermsMixin, OrgPermsMixin -from temba.utils.views import SpaMixin +from temba.orgs.views.base import BaseListView, BaseReadView +from temba.utils.views.mixins import SpaMixin from .models import Archive @@ -13,54 +13,43 @@ class ArchiveCRUDL(SmartCRUDL): model = Archive actions = ("read", "run", "message") - permissions = True - class BaseList(SpaMixin, OrgPermsMixin, SmartListView): - title = _("Archive") + class BaseList(SpaMixin, BaseListView): fields = ("url", "start_date", "period", "record_count", "size") default_order = ("-start_date", "-period", "archive_type") - paginate_by = 250 - - def get_queryset(self, **kwargs): - queryset = super().get_queryset(**kwargs) - - # filter by our archive type - return queryset.filter(org=self.request.org, archive_type=self.get_archive_type()).exclude( - rollup_id__isnull=False + default_template = "archives/archive_list.html" + + def derive_queryset(self, **kwargs): + # filter by our archive type and exclude archives included in rollups + return ( + super() + .derive_queryset(**kwargs) + 
.filter(archive_type=self.get_archive_type()) + .exclude(rollup_id__isnull=False) ) - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["archive_types"] = Archive.TYPE_CHOICES - context["selected"] = self.get_archive_type() - return context - class Run(BaseList): + title = _("Run Archives") menu_path = "/settings/archives/run" @classmethod def derive_url_pattern(cls, path, action): return r"^%s/%s/$" % (path, Archive.TYPE_FLOWRUN) - def derive_title(self): - return _("Run Archives") - def get_archive_type(self): return Archive.TYPE_FLOWRUN class Message(BaseList): + title = _("Message Archives") menu_path = "/settings/archives/message" @classmethod def derive_url_pattern(cls, path, action): return r"^%s/%s/$" % (path, Archive.TYPE_MSG) - def derive_title(self): - return _("Message Archives") - def get_archive_type(self): return Archive.TYPE_MSG - class Read(OrgObjPermsMixin, SmartReadView): + class Read(BaseReadView): def render_to_response(self, context, **response_kwargs): - return HttpResponseRedirect(self.get_object().get_download_link()) + return HttpResponseRedirect(self.object.get_download_link()) diff --git a/temba/campaigns/migrations/0060_archive_deleted_groups.py b/temba/campaigns/migrations/0060_archive_deleted_groups.py index 6e72ade2392..78b299f5143 100644 --- a/temba/campaigns/migrations/0060_archive_deleted_groups.py +++ b/temba/campaigns/migrations/0060_archive_deleted_groups.py @@ -4,7 +4,7 @@ from django.utils import timezone -def archive_campaigns_with_deleted_groups(apps, schema_editor): +def archive_campaigns_with_deleted_groups(apps, schema_editor): # pragma: no cover Campaign = apps.get_model("campaigns", "Campaign") num_archived = 0 @@ -19,7 +19,7 @@ def archive_campaigns_with_deleted_groups(apps, schema_editor): print(f"Archived {num_archived} campaigns with deleted groups.") -def reverse(apps, schema_editor): +def reverse(apps, schema_editor): # pragma: no cover pass diff --git a/temba/campaigns/tests/__init__.py b/temba/campaigns/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/temba/campaigns/tests.py b/temba/campaigns/tests/test_campaign.py similarity index 64% rename from temba/campaigns/tests.py rename to temba/campaigns/tests/test_campaign.py index c08f9aa0816..098ccc463f7 100644 --- a/temba/campaigns/tests.py +++ b/temba/campaigns/tests/test_campaign.py @@ -8,16 +8,13 @@ from django.urls import reverse from django.utils import timezone -from temba.campaigns.views import CampaignEventCRUDL +from temba.campaigns.models import Campaign, CampaignEvent, EventFire +from temba.campaigns.tasks import trim_event_fires from temba.contacts.models import ContactField -from temba.flows.models import Flow, FlowRevision +from temba.flows.models import Flow from temba.msgs.models import Msg from temba.orgs.models import DefinitionExport, Org -from temba.tests import CRUDLTestMixin, MigrationTest, TembaTest, matchers, mock_mailroom -from temba.utils.views import TEMBA_MENU_SELECTION - -from .models import Campaign, CampaignEvent, EventFire -from .tasks import trim_event_fires +from temba.tests import TembaTest, matchers, mock_mailroom class CampaignTest(TembaTest): @@ -159,7 +156,7 @@ def test_get_sorted_events(self): # create a campaign campaign = Campaign.create(self.org, self.user, "Planting Reminders", self.farmers) - flow = self.create_flow("Test") + flow = self.create_flow("Test 1") event1 = CampaignEvent.create_flow_event( self.org, self.admin, campaign, self.planting_date, offset=1, 
unit="W", flow=flow, delivery_hour="13" @@ -173,22 +170,15 @@ def test_get_sorted_events(self): self.assertEqual(campaign.get_sorted_events(), [event2, event1, event3]) - flow_json = self.get_flow_json("favorites") - flow = Flow.objects.create( - name="Call Me Maybe", - org=self.org, - is_system=True, - created_by=self.admin, - modified_by=self.admin, - saved_by=self.admin, - version_number="13.5.0", - flow_type="V", - ) - - FlowRevision.objects.create(flow=flow, definition=flow_json, spec_version=3, revision=1, created_by=self.admin) - event4 = CampaignEvent.create_flow_event( - self.org, self.admin, campaign, self.planting_date, offset=2, unit="W", flow=flow, delivery_hour="5" + self.org, + self.admin, + campaign, + self.planting_date, + offset=2, + unit="W", + flow=self.create_flow("Test 2"), + delivery_hour="5", ) self.assertEqual(campaign.get_sorted_events(), [event2, event1, event3, event4]) @@ -211,7 +201,7 @@ def test_message_event(self): { "uuid": str(event.flow.uuid), "name": event.flow.name, - "spec_version": "13.5.0", + "spec_version": Flow.CURRENT_SPEC_VERSION, "revision": 1, "language": "eng", "type": "messaging_background", @@ -275,7 +265,7 @@ def test_views(self, mr_mocks): # don't log in, try to create a new campaign response = self.client.get(reverse("campaigns.campaign_create")) - self.assertRedirect(response, reverse("users.user_login")) + self.assertLoginRedirect(response) # ok log in as an org self.login(self.admin) @@ -801,7 +791,7 @@ def test_eventfire_get_relative_to_value(self): self.assertIsNotNone(ev4.get_relative_to_value()) def test_import(self): - self.import_file("the_clinic") + self.import_file("test_flows/the_clinic.json") self.assertEqual(1, Campaign.objects.count()) campaign = Campaign.objects.get() @@ -819,7 +809,7 @@ def test_import(self): # message flow should be migrated to latest engine spec self.assertEqual({"und": "This is a second campaign message"}, events[5].message) self.assertEqual("und", events[5].flow.base_language) - self.assertEqual("13.5.0", events[5].flow.version_number) + self.assertEqual(Flow.CURRENT_SPEC_VERSION, events[5].flow.version_number) def test_import_created_on_event(self): campaign = Campaign.create(self.org, self.admin, "New contact reminders", self.farmers) @@ -1139,629 +1129,3 @@ def test_create_message_event(self): self.assertEqual(campaign_event.message, {"eng": "oy, pancake man, come back"}) self.assertEqual(campaign_event.delivery_hour, -1) self.assertEqual(campaign_event.flow.flow_type, Flow.TYPE_BACKGROUND) - - -class CampaignCRUDLTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.create_field("registered", "Registered", value_type="D") - self.create_field("registered", "Registered", value_type="D", org=self.org2) - - def create_campaign(self, org, name, group): - user = org.get_admins().first() - registered = org.fields.get(key="registered") - flow = self.create_flow(f"{name} Flow", org=org) - campaign = Campaign.create(org, user, name, group) - CampaignEvent.create_flow_event( - org, user, campaign, registered, offset=1, unit="W", flow=flow, delivery_hour="13" - ) - return campaign - - def test_menu(self): - menu_url = reverse("campaigns.campaign_menu") - - group = self.create_group("My Group", contacts=[]) - self.create_campaign(self.org, "My Campaign", group) - - self.assertRequestDisallowed(menu_url, [None, self.agent]) - self.assertPageMenu(menu_url, self.admin, ["Active (1)", "Archived (0)"]) - - def test_create(self): - group = self.create_group("Reporters", contacts=[]) - - 
create_url = reverse("campaigns.campaign_create") - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=["name", "group"]) - - # try to submit with no data - self.assertCreateSubmit( - create_url, - self.admin, - {}, - form_errors={"name": "This field is required.", "group": "This field is required."}, - ) - - # submit with valid data - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Reminders", "group": group.id}, - new_obj_query=Campaign.objects.filter(name="Reminders", group=group), - ) - - def test_read(self): - group = self.create_group("Reporters", contacts=[]) - campaign = self.create_campaign(self.org, "Welcomes", group) - read_url = reverse("campaigns.campaign_read", args=[campaign.uuid]) - - self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=campaign) - self.assertContains(response, "Welcomes") - self.assertContains(response, "Registered") - - self.assertContentMenu(read_url, self.admin, ["New Event", "Edit", "Export", "Archive"]) - - campaign.archive(self.admin) - - self.assertContentMenu(read_url, self.admin, ["Activate", "Export"]) - - def test_archive_and_activate(self): - group = self.create_group("Reporters", contacts=[]) - campaign = self.create_campaign(self.org, "Welcomes", group) - other_org_group = self.create_group("Reporters", contacts=[], org=self.org2) - other_org_campaign = self.create_campaign(self.org2, "Welcomes", other_org_group) - - archive_url = reverse("campaigns.campaign_archive", args=[campaign.id]) - - # can't archive campaign if not logged in - response = self.client.post(archive_url) - self.assertLoginRedirect(response) - - self.login(self.admin) - - response = self.client.post(archive_url) - self.assertEqual(302, response.status_code) - - campaign.refresh_from_db() - self.assertTrue(campaign.is_archived) - - # activate that archve - response = self.client.post(reverse("campaigns.campaign_activate", args=[campaign.id])) - self.assertEqual(302, response.status_code) - - campaign.refresh_from_db() - self.assertFalse(campaign.is_archived) - - # can't archive campaign from other org - response = self.client.post(reverse("campaigns.campaign_archive", args=[other_org_campaign.id])) - self.assertEqual(404, response.status_code) - - # check object is unchanged - other_org_campaign.refresh_from_db() - self.assertFalse(other_org_campaign.is_archived) - - @mock_mailroom - def test_update(self, mr_mocks): - group1 = self.create_group("Reporters", contacts=[]) - group2 = self.create_group("Testers", query="tester=1") - - campaign = self.create_campaign(self.org, "Welcomes", group1) - - update_url = reverse("campaigns.campaign_update", args=[campaign.id]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch( - update_url, [self.editor, self.admin], form_fields={"name": "Welcomes", "group": group1.id} - ) - - # try to submit with empty name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "", "group": group1.id}, - form_errors={"name": "This field is required."}, - object_unchanged=campaign, - ) - - # submit with valid name - self.assertUpdateSubmit(update_url, self.admin, {"name": "Greetings", "group": group1.id}, success_status=200) - - campaign.refresh_from_db() - self.assertEqual("Greetings", campaign.name) - self.assertEqual(group1, campaign.group) - - # 
group didn't change so should only have dynamic group creation queued - self.assertEqual(1, len(mr_mocks.queued_batch_tasks)) - - # submit with group change - self.assertUpdateSubmit(update_url, self.admin, {"name": "Greetings", "group": group2.id}, success_status=200) - - campaign.refresh_from_db() - self.assertEqual("Greetings", campaign.name) - self.assertEqual(group2, campaign.group) - - # should have a task queued to reschedule the campaign's event - self.assertEqual(2, len(mr_mocks.queued_batch_tasks)) - self.assertEqual( - { - "type": "schedule_campaign_event", - "org_id": self.org.id, - "task": {"campaign_event_id": campaign.events.filter(is_active=True).get().id, "org_id": self.org.id}, - "queued_on": matchers.Datetime(), - }, - mr_mocks.queued_batch_tasks[1], - ) - - def test_list(self): - group = self.create_group("Reporters", contacts=[]) - campaign1 = self.create_campaign(self.org, "Welcomes", group) - campaign2 = self.create_campaign(self.org, "Follow Ups", group) - - other_org_group = self.create_group("Reporters", contacts=[], org=self.org2) - self.create_campaign(self.org2, "Welcomes", other_org_group) - - list_url = reverse("campaigns.campaign_list") - - self.assertRequestDisallowed(list_url, [None, self.agent]) - self.assertListFetch(list_url, [self.user, self.editor, self.admin], context_objects=[campaign2, campaign1]) - self.assertContentMenu(list_url, self.user, []) - self.assertContentMenu(list_url, self.admin, ["New Campaign"]) - - -class CampaignEventCRUDLTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.create_field("registered", "Registered", value_type="D") - - self.campaign1 = self.create_campaign(self.org, "Welcomes") - self.other_org_campaign = self.create_campaign(self.org2, "Welcomes") - - def create_campaign(self, org, name): - user = org.get_admins().first() - group = self.create_group("Reporters", contacts=[], org=org) - registered = self.org.fields.get(key="registered") - campaign = Campaign.create(org, user, name, group) - flow = self.create_flow(f"{name} Flow", org=org) - background_flow = self.create_flow(f"{name} Background Flow", org=org, flow_type=Flow.TYPE_BACKGROUND) - CampaignEvent.create_flow_event( - org, user, campaign, registered, offset=1, unit="W", flow=flow, delivery_hour="13" - ) - CampaignEvent.create_flow_event( - org, user, campaign, registered, offset=2, unit="W", flow=flow, delivery_hour="13" - ) - CampaignEvent.create_flow_event( - org, user, campaign, registered, offset=2, unit="W", flow=background_flow, delivery_hour="13" - ) - return campaign - - def test_read(self): - event = self.campaign1.events.order_by("id").first() - read_url = reverse("campaigns.campaignevent_read", args=[event.campaign.uuid, event.id]) - - self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=event) - - self.assertContains(response, "Welcomes") - self.assertContains(response, "1 week after") - self.assertContains(response, "Registered") - self.assertEqual("/campaign/active/", response.headers.get(TEMBA_MENU_SELECTION)) - self.assertContentMenu(read_url, self.admin, ["Edit", "Delete"]) - - event.campaign.is_archived = True - event.campaign.save() - - # archived campaigns should focus the archived menu - response = self.assertReadFetch(read_url, [self.editor], context_object=event) - self.assertEqual("/campaign/archived/", response.headers.get(TEMBA_MENU_SELECTION)) - - self.assertContentMenu(read_url, self.admin, 
["Delete"]) - - def test_create(self): - farmer1 = self.create_contact("Rob Jasper", phone="+250788111111") - farmer2 = self.create_contact("Mike Gordon", phone="+250788222222", language="kin") - self.create_contact("Trey Anastasio", phone="+250788333333") - farmers = self.create_group("Farmers", [farmer1, farmer2]) - - # create a contact field for our planting date - planting_date = self.create_field("planting_date", "Planting Date", ContactField.TYPE_DATETIME) - - # update the planting date for our contacts - self.set_contact_field(farmer1, "planting_date", "1/10/2020") - - # create a campaign for our farmers group - campaign = Campaign.create(self.org, self.admin, "Planting Reminders", farmers) - - create_url = f"{reverse('campaigns.campaignevent_create')}?campaign={campaign.id}" - - # update org to use a single flow language - self.org.set_flow_languages(self.admin, ["eng"]) - - non_lang_fields = [ - "event_type", - "relative_to", - "offset", - "unit", - "delivery_hour", - "direction", - "flow_to_start", - "flow_start_mode", - "message_start_mode", - ] - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - - response = self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=non_lang_fields + ["eng"]) - self.assertEqual(3, len(response.context["form"].fields["message_start_mode"].choices)) - - # try to submit with missing fields - self.assertCreateSubmit( - create_url, - self.admin, - { - "event_type": "M", - "eng": "This is my message", - "direction": "A", - "offset": 1, - "unit": "W", - "delivery_hour": 13, - }, - form_errors={"message_start_mode": "This field is required."}, - ) - self.assertCreateSubmit( - create_url, - self.admin, - { - "event_type": "F", - "direction": "A", - "offset": 1, - "unit": "W", - "delivery_hour": 13, - }, - form_errors={"flow_start_mode": "This field is required.", "flow_to_start": "This field is required."}, - ) - - # can create an event with just a eng translation - self.assertCreateSubmit( - create_url, - self.admin, - { - "relative_to": planting_date.id, - "event_type": "M", - "eng": "This is my message", - "direction": "A", - "offset": 1, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "I", - }, - new_obj_query=CampaignEvent.objects.filter(campaign=campaign, event_type="M"), - ) - - event1 = CampaignEvent.objects.get(campaign=campaign) - self.assertEqual({"eng": "This is my message"}, event1.message) - - # add another language to our org - self.org.set_flow_languages(self.admin, ["eng", "kin"]) - # self.org2.set_flow_languages(self.admin, ["fra", "spa"]) - - response = self.assertCreateFetch(create_url, [self.admin], form_fields=non_lang_fields + ["eng", "kin"]) - - # and our language list should be there - self.assertContains(response, "show_language") - - # have to submit translation for primary language - response = self.assertCreateSubmit( - create_url, - self.admin, - { - "relative_to": planting_date.id, - "event_type": "M", - "eng": "", - "kin": "muraho", - "direction": "B", - "offset": 2, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "I", - }, - form_errors={"__all__": "A message is required for 'English'"}, - ) - - response = self.assertCreateSubmit( - create_url, - self.admin, - { - "relative_to": planting_date.id, - "event_type": "M", - "eng": "hello", - "kin": "muraho", - "direction": "B", - "offset": 2, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "I", - }, - 
new_obj_query=CampaignEvent.objects.filter(campaign=campaign, event_type="M", offset=-2), - ) - - # should be redirected back to our campaign read page - self.assertRedirect(response, reverse("campaigns.campaign_read", args=[campaign.uuid])) - - event = CampaignEvent.objects.get(campaign=campaign, event_type="M", offset=-2) - self.assertEqual(-2, event.offset) - self.assertEqual(13, event.delivery_hour) - self.assertEqual("W", event.unit) - self.assertEqual("M", event.event_type) - self.assertEqual("I", event.start_mode) - - self.assertEqual("hello", event.get_message(contact=farmer1)) - self.assertEqual("muraho", event.get_message(contact=farmer2)) - self.assertEqual("hello", event.get_message()) - - self.assertTrue(event.flow.is_system) - self.assertEqual("eng", event.flow.base_language) - self.assertEqual(Flow.TYPE_BACKGROUND, event.flow.flow_type) - - flow_json = event.flow.get_definition() - action_uuid = flow_json["nodes"][0]["actions"][0]["uuid"] - - self.assertEqual( - { - "uuid": str(event.flow.uuid), - "name": f"Single Message ({event.id})", - "spec_version": "13.5.0", - "revision": 1, - "expire_after_minutes": 0, - "language": "eng", - "type": "messaging_background", - "localization": {"kin": {action_uuid: {"text": ["muraho"]}}}, - "nodes": [ - { - "uuid": matchers.UUID4String(), - "actions": [{"uuid": action_uuid, "type": "send_msg", "text": "hello"}], - "exits": [{"uuid": matchers.UUID4String()}], - } - ], - }, - flow_json, - ) - - update_url = reverse("campaigns.campaignevent_update", args=[event.id]) - - # update the event to be passive - response = self.assertUpdateSubmit( - update_url, - self.admin, - { - "relative_to": planting_date.id, - "event_type": "M", - "eng": "hello", - "kin": "muraho", - "direction": "B", - "offset": 3, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "P", - }, - ) - - self.assertEqual(response.status_code, 302) - event = CampaignEvent.objects.get(is_active=True, offset=-3) - - self.assertEqual(-3, event.offset) - self.assertEqual(13, event.delivery_hour) - self.assertEqual("W", event.unit) - self.assertEqual("M", event.event_type) - self.assertEqual("P", event.start_mode) - - update_url = reverse("campaigns.campaignevent_update", args=[event.id]) - - # and add another language to org - self.org.set_flow_languages(self.admin, ["eng", "kin", "spa"]) - - response = self.client.get(update_url) - - self.assertEqual("hello", response.context["form"].fields["eng"].initial) - self.assertEqual("muraho", response.context["form"].fields["kin"].initial) - self.assertEqual("", response.context["form"].fields["spa"].initial) - self.assertEqual(2, len(response.context["form"].fields["flow_start_mode"].choices)) - - # 'Created On' system field must be selectable in the form - contact_fields = [field.key for field in response.context["form"].fields["relative_to"].queryset] - self.assertEqual(contact_fields, ["created_on", "last_seen_on", "planting_date", "registered"]) - - # translation in new language is optional - self.assertUpdateSubmit( - update_url, - self.admin, - { - "relative_to": planting_date.id, - "event_type": "M", - "eng": "Required", - "kin": "@fields.planting_date", - "spa": "", - "direction": "B", - "offset": 1, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "I", - }, - ) - - event.flow.refresh_from_db() - - # we should retain our base language - self.assertEqual("eng", event.flow.base_language) - - # update org languages to something not including the flow's base language - 
self.org.set_flow_languages(self.admin, ["por", "kin"]) - - event = CampaignEvent.objects.all().order_by("id").last() - update_url = reverse("campaigns.campaignevent_update", args=[event.id]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - - # should get new org primary language but also base language of flow - response = self.assertUpdateFetch( - update_url, [self.editor, self.admin], form_fields=non_lang_fields + ["por", "kin", "eng"] - ) - - self.assertEqual(response.context["form"].fields["por"].initial, "") - self.assertEqual(response.context["form"].fields["kin"].initial, "@fields.planting_date") - self.assertEqual(response.context["form"].fields["eng"].initial, "Required") - - def test_update(self): - event1, event2, event3 = self.campaign1.events.order_by("id") - other_org_event1 = self.other_org_campaign.events.order_by("id").first() - - update_url = reverse("campaigns.campaignevent_update", args=[event1.id]) - - # can't view update form if not logged in - response = self.client.get(update_url) - self.assertLoginRedirect(response) - - self.login(self.admin) - - response = self.client.get(update_url) - self.assertEqual( - [ - "event_type", - "relative_to", - "offset", - "unit", - "delivery_hour", - "direction", - "flow_to_start", - "flow_start_mode", - "message_start_mode", - "eng", - "kin", - "loc", - ], - list(response.context["form"].fields.keys()), - ) - - # can't view update form for event from other org - response = self.client.get(reverse("campaigns.campaignevent_update", args=[other_org_event1.id])) - self.assertLoginRedirect(response) - - accepted = self.create_field("accepted", "Accepted", value_type="D") - - # update the first event - response = self.client.post( - update_url, - { - "relative_to": accepted.id, - "event_type": "M", - "eng": "Hi there", - "direction": "B", - "offset": 2, - "unit": "D", - "flow_to_start": "", - "delivery_hour": 11, - "message_start_mode": "I", - }, - ) - self.assertEqual(302, response.status_code) - - # original event will be unchanged.. 
except to be inactive - event1.refresh_from_db() - self.assertEqual("F", event1.event_type) - self.assertFalse(event1.is_active) - - # but will have a new replacement event - new_event1 = self.campaign1.events.filter(id__gt=event2.id).last() - - self.assertEqual(accepted, new_event1.relative_to) - self.assertEqual("M", new_event1.event_type) - self.assertEqual(-2, new_event1.offset) - self.assertEqual("D", new_event1.unit) - - # can't update event in other org - response = self.client.post( - update_url, - { - "relative_to": other_org_event1.relative_to, - "event_type": "M", - "eng": "Hi there", - "direction": "B", - "offset": 2, - "unit": "D", - "flow_to_start": "", - "delivery_hour": 11, - }, - ) - self.assertEqual(404, response.status_code) - - # check event is unchanged - other_org_event1.refresh_from_db() - self.assertEqual("F", other_org_event1.event_type) - self.assertTrue(other_org_event1.is_active) - - # event based on background flow should show a warning for it's info text - update_url = reverse("campaigns.campaignevent_update", args=[event3.id]) - response = self.client.get(update_url) - self.assertEqual( - CampaignEventCRUDL.BACKGROUND_WARNING, - response.context["form"].fields["flow_to_start"].widget.attrs["info_text"], - ) - - def test_delete(self): - # update event to have a field dependency - event = self.campaign1.events.get(offset=1) - update_url = reverse("campaigns.campaignevent_update", args=[event.id]) - self.assertUpdateSubmit( - update_url, - self.admin, - { - "relative_to": event.relative_to.id, - "event_type": "M", - "eng": "This is my message @fields.registered", - "direction": "A", - "offset": 1, - "unit": "W", - "flow_to_start": "", - "delivery_hour": 13, - "message_start_mode": "I", - }, - ) - - event = self.campaign1.events.get(offset=1, is_active=True) - - self.assertEqual(1, event.flow.field_dependencies.count()) - - # delete the event - self.client.post(reverse("campaigns.campaignevent_delete", args=[event.id]), dict()) - self.assertFalse(CampaignEvent.objects.filter(id=event.id).first().is_active) - - # our single message flow should be released and take its dependencies with it - self.assertEqual(event.flow.field_dependencies.count(), 0) - - -class ArchiveWithDeletedGroupsTest(MigrationTest): - app = "campaigns" - migrate_from = "0059_squashed" - migrate_to = "0060_archive_deleted_groups" - - def setUpBeforeMigration(self, apps): - group1 = self.create_group("Group 1", contacts=[]) - group2 = self.create_group("Group 2", contacts=[]) - group2.release(self.admin) - - self.campaign1 = Campaign.create(self.org, self.admin, "Campaign 1", group1) - self.campaign2 = Campaign.create(self.org, self.admin, "Campaign 2", group2) - - def test_migration(self): - self.campaign1.refresh_from_db() - self.campaign2.refresh_from_db() - - self.assertFalse(self.campaign1.is_archived) - self.assertTrue(self.campaign2.is_archived) diff --git a/temba/campaigns/tests/test_campaigncrudl.py b/temba/campaigns/tests/test_campaigncrudl.py new file mode 100644 index 00000000000..912aeb57da2 --- /dev/null +++ b/temba/campaigns/tests/test_campaigncrudl.py @@ -0,0 +1,173 @@ +from django.urls import reverse + +from temba.campaigns.models import Campaign, CampaignEvent +from temba.tests import CRUDLTestMixin, TembaTest, matchers, mock_mailroom + + +class CampaignCRUDLTest(TembaTest, CRUDLTestMixin): + def setUp(self): + super().setUp() + + self.create_field("registered", "Registered", value_type="D") + self.create_field("registered", "Registered", value_type="D", org=self.org2) + + def 
create_campaign(self, org, name, group): + user = org.get_admins().first() + registered = org.fields.get(key="registered") + flow = self.create_flow(f"{name} Flow", org=org) + campaign = Campaign.create(org, user, name, group) + CampaignEvent.create_flow_event( + org, user, campaign, registered, offset=1, unit="W", flow=flow, delivery_hour="13" + ) + return campaign + + def test_menu(self): + menu_url = reverse("campaigns.campaign_menu") + + group = self.create_group("My Group", contacts=[]) + self.create_campaign(self.org, "My Campaign", group) + + self.assertRequestDisallowed(menu_url, [None, self.agent]) + self.assertPageMenu(menu_url, self.admin, ["Active (1)", "Archived (0)"]) + + def test_create(self): + group = self.create_group("Reporters", contacts=[]) + + create_url = reverse("campaigns.campaign_create") + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=["name", "group"]) + + # try to submit with no data + self.assertCreateSubmit( + create_url, + self.admin, + {}, + form_errors={"name": "This field is required.", "group": "This field is required."}, + ) + + # submit with valid data + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Reminders", "group": group.id}, + new_obj_query=Campaign.objects.filter(name="Reminders", group=group), + ) + + def test_read(self): + group = self.create_group("Reporters", contacts=[]) + campaign = self.create_campaign(self.org, "Welcomes", group) + read_url = reverse("campaigns.campaign_read", args=[campaign.uuid]) + + self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=campaign) + self.assertContains(response, "Welcomes") + self.assertContains(response, "Registered") + + self.assertContentMenu(read_url, self.admin, ["New Event", "Edit", "Export", "Archive"]) + + campaign.archive(self.admin) + + self.assertContentMenu(read_url, self.admin, ["Activate", "Export"]) + + def test_archive_and_activate(self): + group = self.create_group("Reporters", contacts=[]) + campaign = self.create_campaign(self.org, "Welcomes", group) + other_org_group = self.create_group("Reporters", contacts=[], org=self.org2) + other_org_campaign = self.create_campaign(self.org2, "Welcomes", other_org_group) + + archive_url = reverse("campaigns.campaign_archive", args=[campaign.id]) + + # can't archive campaign if not logged in + response = self.client.post(archive_url) + self.assertLoginRedirect(response) + + self.login(self.admin) + + response = self.client.post(archive_url) + self.assertEqual(302, response.status_code) + + campaign.refresh_from_db() + self.assertTrue(campaign.is_archived) + + # activate that archive + response = self.client.post(reverse("campaigns.campaign_activate", args=[campaign.id])) + self.assertEqual(302, response.status_code) + + campaign.refresh_from_db() + self.assertFalse(campaign.is_archived) + + # can't archive campaign from other org + response = self.client.post(reverse("campaigns.campaign_archive", args=[other_org_campaign.id])) + self.assertEqual(302, response.status_code) + + # check object is unchanged + other_org_campaign.refresh_from_db() + self.assertFalse(other_org_campaign.is_archived) + + @mock_mailroom + def test_update(self, mr_mocks): + group1 = self.create_group("Reporters", contacts=[]) + group2 = self.create_group("Testers", query="tester=1") + + campaign = self.create_campaign(self.org, "Welcomes",
group1) + + update_url = reverse("campaigns.campaign_update", args=[campaign.id]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch( + update_url, [self.editor, self.admin], form_fields={"name": "Welcomes", "group": group1.id} + ) + + # try to submit with empty name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "", "group": group1.id}, + form_errors={"name": "This field is required."}, + object_unchanged=campaign, + ) + + # submit with valid name + self.assertUpdateSubmit(update_url, self.admin, {"name": "Greetings", "group": group1.id}, success_status=200) + + campaign.refresh_from_db() + self.assertEqual("Greetings", campaign.name) + self.assertEqual(group1, campaign.group) + + # group didn't change so should only have dynamic group creation queued + self.assertEqual(1, len(mr_mocks.queued_batch_tasks)) + + # submit with group change + self.assertUpdateSubmit(update_url, self.admin, {"name": "Greetings", "group": group2.id}, success_status=200) + + campaign.refresh_from_db() + self.assertEqual("Greetings", campaign.name) + self.assertEqual(group2, campaign.group) + + # should have a task queued to reschedule the campaign's event + self.assertEqual(2, len(mr_mocks.queued_batch_tasks)) + self.assertEqual( + { + "type": "schedule_campaign_event", + "org_id": self.org.id, + "task": {"campaign_event_id": campaign.events.filter(is_active=True).get().id, "org_id": self.org.id}, + "queued_on": matchers.Datetime(), + }, + mr_mocks.queued_batch_tasks[1], + ) + + def test_list(self): + group = self.create_group("Reporters", contacts=[]) + campaign1 = self.create_campaign(self.org, "Welcomes", group) + campaign2 = self.create_campaign(self.org, "Follow Ups", group) + + other_org_group = self.create_group("Reporters", contacts=[], org=self.org2) + self.create_campaign(self.org2, "Welcomes", other_org_group) + + list_url = reverse("campaigns.campaign_list") + + self.assertRequestDisallowed(list_url, [None, self.agent]) + self.assertListFetch(list_url, [self.user, self.editor, self.admin], context_objects=[campaign2, campaign1]) + self.assertContentMenu(list_url, self.user, []) + self.assertContentMenu(list_url, self.admin, ["New Campaign"]) diff --git a/temba/campaigns/tests/test_eventcrudl.py b/temba/campaigns/tests/test_eventcrudl.py new file mode 100644 index 00000000000..cb7bedcae42 --- /dev/null +++ b/temba/campaigns/tests/test_eventcrudl.py @@ -0,0 +1,444 @@ +from django.urls import reverse + +from temba.campaigns.models import Campaign, CampaignEvent +from temba.campaigns.views import CampaignEventCRUDL +from temba.contacts.models import ContactField +from temba.flows.models import Flow +from temba.tests import CRUDLTestMixin, TembaTest, matchers +from temba.utils.views.mixins import TEMBA_MENU_SELECTION + + +class CampaignEventCRUDLTest(TembaTest, CRUDLTestMixin): + def setUp(self): + super().setUp() + + self.create_field("registered", "Registered", value_type="D") + + self.campaign1 = self.create_campaign(self.org, "Welcomes") + self.other_org_campaign = self.create_campaign(self.org2, "Welcomes") + + def create_campaign(self, org, name): + user = org.get_admins().first() + group = self.create_group("Reporters", contacts=[], org=org) + registered = self.org.fields.get(key="registered") + campaign = Campaign.create(org, user, name, group) + flow = self.create_flow(f"{name} Flow", org=org) + background_flow = self.create_flow(f"{name} Background Flow", org=org, flow_type=Flow.TYPE_BACKGROUND) + 
CampaignEvent.create_flow_event( + org, user, campaign, registered, offset=1, unit="W", flow=flow, delivery_hour="13" + ) + CampaignEvent.create_flow_event( + org, user, campaign, registered, offset=2, unit="W", flow=flow, delivery_hour="13" + ) + CampaignEvent.create_flow_event( + org, user, campaign, registered, offset=2, unit="W", flow=background_flow, delivery_hour="13" + ) + return campaign + + def test_read(self): + event = self.campaign1.events.order_by("id").first() + read_url = reverse("campaigns.campaignevent_read", args=[event.campaign.uuid, event.id]) + + self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=event) + + self.assertContains(response, "Welcomes") + self.assertContains(response, "1 week after") + self.assertContains(response, "Registered") + self.assertEqual("/campaign/active/", response.headers.get(TEMBA_MENU_SELECTION)) + self.assertContentMenu(read_url, self.admin, ["Edit", "Delete"]) + + event.campaign.is_archived = True + event.campaign.save() + + # archived campaigns should focus the archived menu + response = self.assertReadFetch(read_url, [self.editor], context_object=event) + self.assertEqual("/campaign/archived/", response.headers.get(TEMBA_MENU_SELECTION)) + + self.assertContentMenu(read_url, self.admin, ["Delete"]) + + def test_create(self): + farmer1 = self.create_contact("Rob Jasper", phone="+250788111111") + farmer2 = self.create_contact("Mike Gordon", phone="+250788222222", language="kin") + self.create_contact("Trey Anastasio", phone="+250788333333") + farmers = self.create_group("Farmers", [farmer1, farmer2]) + + # create a contact field for our planting date + planting_date = self.create_field("planting_date", "Planting Date", ContactField.TYPE_DATETIME) + + # update the planting date for our contacts + self.set_contact_field(farmer1, "planting_date", "1/10/2020") + + # create a campaign for our farmers group + campaign = Campaign.create(self.org, self.admin, "Planting Reminders", farmers) + + create_url = f"{reverse('campaigns.campaignevent_create')}?campaign={campaign.id}" + + # update org to use a single flow language + self.org.set_flow_languages(self.admin, ["eng"]) + + non_lang_fields = [ + "event_type", + "relative_to", + "offset", + "unit", + "delivery_hour", + "direction", + "flow_to_start", + "flow_start_mode", + "message_start_mode", + ] + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + + response = self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=non_lang_fields + ["eng"]) + self.assertEqual(3, len(response.context["form"].fields["message_start_mode"].choices)) + + # try to submit with missing fields + self.assertCreateSubmit( + create_url, + self.admin, + { + "event_type": "M", + "eng": "This is my message", + "direction": "A", + "offset": 1, + "unit": "W", + "delivery_hour": 13, + }, + form_errors={"message_start_mode": "This field is required."}, + ) + self.assertCreateSubmit( + create_url, + self.admin, + { + "event_type": "F", + "direction": "A", + "offset": 1, + "unit": "W", + "delivery_hour": 13, + }, + form_errors={"flow_start_mode": "This field is required.", "flow_to_start": "This field is required."}, + ) + + # can create an event with just an eng translation + self.assertCreateSubmit( + create_url, + self.admin, + { + "relative_to": planting_date.id, + "event_type": "M", + "eng": "This is my message", + "direction": "A", + "offset": 1, + "unit": "W", + 
"flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "I", + }, + new_obj_query=CampaignEvent.objects.filter(campaign=campaign, event_type="M"), + ) + + event1 = CampaignEvent.objects.get(campaign=campaign) + self.assertEqual({"eng": "This is my message"}, event1.message) + + # add another language to our org + self.org.set_flow_languages(self.admin, ["eng", "kin"]) + # self.org2.set_flow_languages(self.admin, ["fra", "spa"]) + + response = self.assertCreateFetch(create_url, [self.admin], form_fields=non_lang_fields + ["eng", "kin"]) + + # and our language list should be there + self.assertContains(response, "show_language") + + # have to submit translation for primary language + response = self.assertCreateSubmit( + create_url, + self.admin, + { + "relative_to": planting_date.id, + "event_type": "M", + "eng": "", + "kin": "muraho", + "direction": "B", + "offset": 2, + "unit": "W", + "flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "I", + }, + form_errors={"__all__": "A message is required for 'English'"}, + ) + + response = self.assertCreateSubmit( + create_url, + self.admin, + { + "relative_to": planting_date.id, + "event_type": "M", + "eng": "hello", + "kin": "muraho", + "direction": "B", + "offset": 2, + "unit": "W", + "flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "I", + }, + new_obj_query=CampaignEvent.objects.filter(campaign=campaign, event_type="M", offset=-2), + ) + + # should be redirected back to our campaign read page + self.assertRedirect(response, reverse("campaigns.campaign_read", args=[campaign.uuid])) + + event = CampaignEvent.objects.get(campaign=campaign, event_type="M", offset=-2) + self.assertEqual(-2, event.offset) + self.assertEqual(13, event.delivery_hour) + self.assertEqual("W", event.unit) + self.assertEqual("M", event.event_type) + self.assertEqual("I", event.start_mode) + + self.assertEqual("hello", event.get_message(contact=farmer1)) + self.assertEqual("muraho", event.get_message(contact=farmer2)) + self.assertEqual("hello", event.get_message()) + + self.assertTrue(event.flow.is_system) + self.assertEqual("eng", event.flow.base_language) + self.assertEqual(Flow.TYPE_BACKGROUND, event.flow.flow_type) + + flow_json = event.flow.get_definition() + action_uuid = flow_json["nodes"][0]["actions"][0]["uuid"] + + self.assertEqual( + { + "uuid": str(event.flow.uuid), + "name": f"Single Message ({event.id})", + "spec_version": Flow.CURRENT_SPEC_VERSION, + "revision": 1, + "expire_after_minutes": 0, + "language": "eng", + "type": "messaging_background", + "localization": {"kin": {action_uuid: {"text": ["muraho"]}}}, + "nodes": [ + { + "uuid": matchers.UUID4String(), + "actions": [{"uuid": action_uuid, "type": "send_msg", "text": "hello"}], + "exits": [{"uuid": matchers.UUID4String()}], + } + ], + }, + flow_json, + ) + + update_url = reverse("campaigns.campaignevent_update", args=[event.id]) + + # update the event to be passive + response = self.assertUpdateSubmit( + update_url, + self.admin, + { + "relative_to": planting_date.id, + "event_type": "M", + "eng": "hello", + "kin": "muraho", + "direction": "B", + "offset": 3, + "unit": "W", + "flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "P", + }, + ) + + self.assertEqual(response.status_code, 302) + event = CampaignEvent.objects.get(is_active=True, offset=-3) + + self.assertEqual(-3, event.offset) + self.assertEqual(13, event.delivery_hour) + self.assertEqual("W", event.unit) + self.assertEqual("M", event.event_type) + self.assertEqual("P", 
event.start_mode) + + update_url = reverse("campaigns.campaignevent_update", args=[event.id]) + + # and add another language to org + self.org.set_flow_languages(self.admin, ["eng", "kin", "spa"]) + + response = self.client.get(update_url) + + self.assertEqual("hello", response.context["form"].fields["eng"].initial) + self.assertEqual("muraho", response.context["form"].fields["kin"].initial) + self.assertEqual("", response.context["form"].fields["spa"].initial) + self.assertEqual(2, len(response.context["form"].fields["flow_start_mode"].choices)) + + # 'Created On' system field must be selectable in the form + contact_fields = [field.key for field in response.context["form"].fields["relative_to"].queryset] + self.assertEqual(contact_fields, ["created_on", "last_seen_on", "planting_date", "registered"]) + + # translation in new language is optional + self.assertUpdateSubmit( + update_url, + self.admin, + { + "relative_to": planting_date.id, + "event_type": "M", + "eng": "Required", + "kin": "@fields.planting_date", + "spa": "", + "direction": "B", + "offset": 1, + "unit": "W", + "flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "I", + }, + ) + + event.flow.refresh_from_db() + + # we should retain our base language + self.assertEqual("eng", event.flow.base_language) + + # update org languages to something not including the flow's base language + self.org.set_flow_languages(self.admin, ["por", "kin"]) + + event = CampaignEvent.objects.all().order_by("id").last() + update_url = reverse("campaigns.campaignevent_update", args=[event.id]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + + # should get new org primary language but also base language of flow + response = self.assertUpdateFetch( + update_url, [self.editor, self.admin], form_fields=non_lang_fields + ["por", "kin", "eng"] + ) + + self.assertEqual(response.context["form"].fields["por"].initial, "") + self.assertEqual(response.context["form"].fields["kin"].initial, "@fields.planting_date") + self.assertEqual(response.context["form"].fields["eng"].initial, "Required") + + def test_update(self): + event1, event2, event3 = self.campaign1.events.order_by("id") + other_org_event1 = self.other_org_campaign.events.order_by("id").first() + + update_url = reverse("campaigns.campaignevent_update", args=[event1.id]) + + # can't view update form if not logged in + response = self.client.get(update_url) + self.assertLoginRedirect(response) + + self.login(self.admin) + + response = self.client.get(update_url) + self.assertEqual( + [ + "event_type", + "relative_to", + "offset", + "unit", + "delivery_hour", + "direction", + "flow_to_start", + "flow_start_mode", + "message_start_mode", + "eng", + "kin", + "loc", + ], + list(response.context["form"].fields.keys()), + ) + + # can't view update form for event from other org + response = self.client.get(reverse("campaigns.campaignevent_update", args=[other_org_event1.id])) + self.assertLoginRedirect(response) + + accepted = self.create_field("accepted", "Accepted", value_type="D") + + # update the first event + response = self.client.post( + update_url, + { + "relative_to": accepted.id, + "event_type": "M", + "eng": "Hi there", + "direction": "B", + "offset": 2, + "unit": "D", + "flow_to_start": "", + "delivery_hour": 11, + "message_start_mode": "I", + }, + ) + self.assertEqual(302, response.status_code) + + # original event will be unchanged.. 
except to be inactive + event1.refresh_from_db() + self.assertEqual("F", event1.event_type) + self.assertFalse(event1.is_active) + + # but will have a new replacement event + new_event1 = self.campaign1.events.filter(id__gt=event2.id).last() + + self.assertEqual(accepted, new_event1.relative_to) + self.assertEqual("M", new_event1.event_type) + self.assertEqual(-2, new_event1.offset) + self.assertEqual("D", new_event1.unit) + + # can't update event in other org + response = self.client.post( + update_url, + { + "relative_to": other_org_event1.relative_to, + "event_type": "M", + "eng": "Hi there", + "direction": "B", + "offset": 2, + "unit": "D", + "flow_to_start": "", + "delivery_hour": 11, + }, + ) + self.assertEqual(404, response.status_code) + + # check event is unchanged + other_org_event1.refresh_from_db() + self.assertEqual("F", other_org_event1.event_type) + self.assertTrue(other_org_event1.is_active) + + # event based on background flow should show a warning for its info text + update_url = reverse("campaigns.campaignevent_update", args=[event3.id]) + response = self.client.get(update_url) + self.assertEqual( + CampaignEventCRUDL.BACKGROUND_WARNING, + response.context["form"].fields["flow_to_start"].widget.attrs["info_text"], + ) + + def test_delete(self): + # update event to have a field dependency + event = self.campaign1.events.get(offset=1) + update_url = reverse("campaigns.campaignevent_update", args=[event.id]) + self.assertUpdateSubmit( + update_url, + self.admin, + { + "relative_to": event.relative_to.id, + "event_type": "M", + "eng": "This is my message @fields.registered", + "direction": "A", + "offset": 1, + "unit": "W", + "flow_to_start": "", + "delivery_hour": 13, + "message_start_mode": "I", + }, + ) + + event = self.campaign1.events.get(offset=1, is_active=True) + + self.assertEqual(1, event.flow.field_dependencies.count()) + + # delete the event + self.client.post(reverse("campaigns.campaignevent_delete", args=[event.id]), dict()) + self.assertFalse(CampaignEvent.objects.filter(id=event.id).first().is_active) + + # our single message flow should be released and take its dependencies with it + self.assertEqual(event.flow.field_dependencies.count(), 0) diff --git a/temba/campaigns/views.py b/temba/campaigns/views.py index d7b5472b9ca..45630ccf58e 100644 --- a/temba/campaigns/views.py +++ b/temba/campaigns/views.py @@ -1,12 +1,4 @@ -from smartmin.views import ( - SmartCreateView, - SmartCRUDL, - SmartDeleteView, - SmartListView, - SmartReadView, - SmartTemplateView, - SmartUpdateView, -) +from smartmin.views import SmartCreateView, SmartCRUDL, SmartDeleteView, SmartReadView, SmartUpdateView from django import forms from django.contrib import messages @@ -19,10 +11,11 @@ from temba.contacts.models import ContactField, ContactGroup from temba.flows.models import Flow from temba.msgs.models import Msg -from temba.orgs.views import MenuMixin, ModalMixin, OrgFilterMixin, OrgObjPermsMixin, OrgPermsMixin +from temba.orgs.views.base import BaseListView, BaseMenuView, BaseReadView +from temba.orgs.views.mixins import BulkActionMixin, OrgObjPermsMixin, OrgPermsMixin from temba.utils import languages from temba.utils.fields import CompletionTextarea, InputWidget, SelectWidget, TembaChoiceField -from temba.utils.views import BulkActionMixin, ContentMenuMixin, SpaMixin +from temba.utils.views.mixins import ContextMenuMixin, ModalFormMixin, SpaMixin from .models import Campaign, CampaignEvent @@ -52,7 +45,7 @@ class CampaignCRUDL(SmartCRUDL): model = Campaign actions = ("create",
"read", "update", "list", "archived", "archive", "activate", "menu") - class Menu(MenuMixin, SmartTemplateView): + class Menu(BaseMenuView): def derive_menu(self): org = self.request.org @@ -74,12 +67,13 @@ def derive_menu(self): icon="campaign_archived", count=org.campaigns.filter(is_active=True, is_archived=True).count(), href="campaigns.campaign_archived", + perm="campaigns.campaign_list", ) ) return menu - class Update(OrgObjPermsMixin, ModalMixin, SmartUpdateView): + class Update(ModalFormMixin, OrgObjPermsMixin, SmartUpdateView): fields = ("name", "group") form_class = CampaignForm @@ -114,14 +108,14 @@ def form_valid(self, form): return self.render_modal_response(form) - class Read(SpaMixin, OrgObjPermsMixin, ContentMenuMixin, SmartReadView): + class Read(SpaMixin, ContextMenuMixin, BaseReadView): slug_url_kwarg = "uuid" menu_path = "/campaign/active" def derive_title(self): return self.object.name - def build_content_menu(self, menu): + def build_context_menu(self, menu): obj = self.get_object() if obj.is_archived: @@ -153,7 +147,7 @@ def build_content_menu(self, menu): if self.has_org_perm("campaigns.campaign_archive"): menu.add_url_post(_("Archive"), reverse("campaigns.campaign_archive", args=[obj.id])) - class Create(OrgPermsMixin, ModalMixin, SmartCreateView): + class Create(ModalFormMixin, OrgPermsMixin, SmartCreateView): fields = ("name", "group") form_class = CampaignForm success_url = "uuid@campaigns.campaign_read" @@ -168,30 +162,22 @@ def get_form_kwargs(self): kwargs["org"] = self.request.org return kwargs - class BaseList(SpaMixin, ContentMenuMixin, OrgFilterMixin, OrgPermsMixin, BulkActionMixin, SmartListView): + class BaseList(SpaMixin, ContextMenuMixin, BulkActionMixin, BaseListView): + permission = "campaigns.campaign_list" fields = ("name", "group") default_template = "campaigns/campaign_list.html" default_order = ("-modified_on",) - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["org_has_campaigns"] = self.request.org.campaigns.exists() - context["request_url"] = self.request.path - return context - class List(BaseList): title = _("Active") - fields = ("name", "group") bulk_actions = ("archive",) search_fields = ("name__icontains", "group__name__icontains") menu_path = "/campaign/active" def get_queryset(self, *args, **kwargs): - qs = super().get_queryset(*args, **kwargs) - qs = qs.filter(is_active=True, is_archived=False) - return qs + return super().get_queryset(*args, **kwargs).filter(is_archived=False) - def build_content_menu(self, menu): + def build_context_menu(self, menu): if self.has_org_perm("campaigns.campaign_create"): menu.add_modax( _("New Campaign"), @@ -203,16 +189,13 @@ def build_content_menu(self, menu): class Archived(BaseList): title = _("Archived") - fields = ("name",) bulk_actions = ("restore",) menu_path = "/campaign/archived" def get_queryset(self, *args, **kwargs): - qs = super().get_queryset(*args, **kwargs) - qs = qs.filter(is_active=True, is_archived=True) - return qs + return super().get_queryset(*args, **kwargs).filter(is_archived=True) - class Archive(OrgFilterMixin, OrgPermsMixin, SmartUpdateView): + class Archive(OrgObjPermsMixin, SmartUpdateView): fields = () success_url = "uuid@campaigns.campaign_read" success_message = _("Campaign archived") @@ -221,7 +204,7 @@ def save(self, obj): obj.apply_action_archive(self.request.user, Campaign.objects.filter(id=obj.id)) return obj - class Activate(OrgFilterMixin, OrgPermsMixin, SmartUpdateView): + class Activate(OrgObjPermsMixin, 
SmartUpdateView): fields = () success_url = "uuid@campaigns.campaign_read" success_message = _("Campaign activated") @@ -481,7 +464,7 @@ class CampaignEventCRUDL(SmartCRUDL): "This is a background flow. When it triggers, it will run it for all contacts without interruption." ) - class Read(SpaMixin, OrgObjPermsMixin, ContentMenuMixin, SmartReadView): + class Read(SpaMixin, OrgObjPermsMixin, ContextMenuMixin, SmartReadView): @classmethod def derive_url_pattern(cls, path, action): return r"^%s/%s/(?P[0-9a-f-]+)/(?P\d+)/$" % (path, action) @@ -519,7 +502,7 @@ def get_context_data(self, **kwargs): return context - def build_content_menu(self, menu): + def build_context_menu(self, menu): obj = self.get_object() if self.has_org_perm("campaigns.campaignevent_update") and not obj.campaign.is_archived: @@ -538,7 +521,7 @@ def build_content_menu(self, menu): title=_("Delete Event"), ) - class Delete(ModalMixin, OrgObjPermsMixin, SmartDeleteView): + class Delete(ModalFormMixin, OrgObjPermsMixin, SmartDeleteView): default_template = "smartmin/delete_confirm.html" submit_button_name = _("Delete") fields = ("uuid",) @@ -559,7 +542,7 @@ def get_redirect_url(self): def get_cancel_url(self): # pragma: needs cover return reverse("campaigns.campaign_read", args=[self.object.campaign.uuid]) - class Update(OrgObjPermsMixin, ModalMixin, SmartUpdateView): + class Update(ModalFormMixin, OrgObjPermsMixin, SmartUpdateView): form_class = CampaignEventForm default_fields = [ "event_type", @@ -658,7 +641,7 @@ def pre_save(self, obj): def get_success_url(self): return reverse("campaigns.campaignevent_read", args=[self.object.campaign.uuid, self.object.pk]) - class Create(OrgPermsMixin, ModalMixin, SmartCreateView): + class Create(ModalFormMixin, OrgPermsMixin, SmartCreateView): default_fields = [ "event_type", "flow_to_start", diff --git a/temba/channels/android/sync.py b/temba/channels/android/sync.py index fa16eb26d16..36d02c950d6 100644 --- a/temba/channels/android/sync.py +++ b/temba/channels/android/sync.py @@ -1,16 +1,7 @@ -import time from datetime import datetime, timezone as tzone -import google.auth.transport.requests -import requests -from google.oauth2 import service_account - -from django.conf import settings - from temba.msgs.models import Msg -from ..models import Channel - def get_sync_commands(msgs): """ @@ -53,76 +44,6 @@ def get_channel_commands(channel, commands, sync_event=None): return commands -def _get_access_token(): # pragma: no cover - """ - Retrieve a valid access token that can be used to authorize requests. 
- """ - credentials = service_account.Credentials.from_service_account_file( - settings.ANDROID_CREDENTIALS_FILE, scopes=["https://www.googleapis.com/auth/firebase.messaging"] - ) - request = google.auth.transport.requests.Request() - credentials.refresh(request) - return credentials.token - - -def validate_registration_info(registration_id): # pragma: no cover - valid_registration_ids = [] - - backoffs = [1, 3, 6] - while backoffs: - resp = requests.get( - f"https://iid.googleapis.com/iid/info/{registration_id}", - params={"details": "true"}, - headers={ - "Authorization": "Bearer " + _get_access_token(), - "access_token_auth": "true", - "Content-Type": "application/json", - }, - ) - - if resp.status_code == 200: - valid_registration_ids.append(registration_id) - break - else: - time.sleep(backoffs[0]) - backoffs = backoffs[1:] - - return valid_registration_ids - - -def sync_channel_fcm(registration_id, channel=None): # pragma: no cover - fcm_failed = False - try: - resp = requests.post( - f"https://fcm.googleapis.com/v1/projects/{settings.ANDROID_FCM_PROJECT_ID}/messages:send", - json={"message": {"token": registration_id, "data": {"msg": "sync"}}}, - headers={ - "Authorization": "Bearer " + _get_access_token(), - "Content-Type": "application/json", - }, - ) - - success = 0 - if resp.status_code == 200: - resp_json = resp.json() - success = resp_json.get("success", 0) - message_id = resp_json.get("message_id", None) - if message_id: - success = 1 - if not success: - fcm_failed = True - except requests.RequestException: - fcm_failed = True - - if fcm_failed: - valid_registration_ids = validate_registration_info(registration_id) - - if registration_id not in valid_registration_ids: - # this fcm id is invalid now, clear it out - channel.config.pop(Channel.CONFIG_FCM_ID, None) - channel.save(update_fields=["config"]) - - def update_message(msg, cmd): """ Updates a message according to the provided client command diff --git a/temba/channels/android/views.py b/temba/channels/android/views.py index bd229674e72..5c66f52e163 100644 --- a/temba/channels/android/views.py +++ b/temba/channels/android/views.py @@ -17,7 +17,7 @@ from temba.msgs.models import Msg from temba.notifications.incidents.builtin import ChannelOutdatedAppIncidentType from temba.notifications.models import Incident -from temba.utils import analytics, json +from temba.utils import json from ..models import Channel, SyncEvent from .claim import UnsupportedAndroidChannelError, get_or_create_channel @@ -50,8 +50,6 @@ def register(request): @csrf_exempt @transaction.non_atomic_requests def sync(request, channel_id): - start = time.time() - if request.method != "POST": return HttpResponse(status=500, content="POST Required") @@ -236,7 +234,4 @@ def sync(request, channel_id): sync_event.outgoing_command_count = len([_ for _ in outgoing_cmds if _["cmd"] != "ack"]) sync_event.save() - # keep track of how long a sync takes - analytics.gauges({"temba.relayer_sync": time.time() - start}) - return JsonResponse(result) diff --git a/temba/channels/migrations/0186_alter_channelcount_count.py b/temba/channels/migrations/0186_alter_channelcount_count.py new file mode 100644 index 00000000000..5f6a0b7d0bc --- /dev/null +++ b/temba/channels/migrations/0186_alter_channelcount_count.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.2 on 2024-12-04 18:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("channels", "0185_alter_channel_name"), + ] + + operations = [ + migrations.AlterField( + 
model_name="channelcount", + name="count", + field=models.IntegerField(), + ), + ] diff --git a/temba/channels/models.py b/temba/channels/models.py index 3560408effa..4e85341e858 100644 --- a/temba/channels/models.py +++ b/temba/channels/models.py @@ -1,11 +1,10 @@ +import itertools import logging from abc import ABCMeta from dataclasses import dataclass -from datetime import timedelta +from datetime import datetime, timedelta, timezone as tzone from enum import Enum -from urllib.parse import quote_plus from uuid import uuid4 -from xml.sax.saxutils import escape import phonenumbers from django_countries.fields import CountryField @@ -13,9 +12,7 @@ from smartmin.models import SmartModel from twilio.base.exceptions import TwilioRestException -from django.conf import settings from django.contrib.postgres.fields import ArrayField -from django.core.files.storage import storages from django.db import models from django.db.models import Q, Sum from django.db.models.signals import pre_save @@ -26,18 +23,12 @@ from django.utils.functional import cached_property from django.utils.translation import gettext_lazy as _ +from temba import mailroom from temba.orgs.models import DependencyMixin, Org -from temba.utils import analytics, get_anonymous_user, json, on_transaction_commit, redact -from temba.utils.models import ( - JSONAsTextField, - LegacyUUIDMixin, - SquashableModel, - TembaModel, - delete_in_batches, - generate_uuid, -) +from temba.utils import analytics, dynamo, get_anonymous_user, on_transaction_commit, redact +from temba.utils.models import JSONAsTextField, LegacyUUIDMixin, TembaModel, delete_in_batches, generate_uuid +from temba.utils.models.counts import BaseSquashableCount from temba.utils.text import generate_secret -from temba.utils.uuid import is_uuid logger = logging.getLogger(__name__) @@ -553,9 +544,6 @@ def get_address_display(self, e164=False): # the number may be alphanumeric in the case of short codes pass - elif URN.TWITTER_SCHEME in self.schemes: - return "@%s" % self.address - elif URN.FACEBOOK_SCHEME in self.schemes: return "%s (%s)" % (self.config.get(Channel.CONFIG_PAGE_NAME, self.name), self.address) @@ -648,7 +636,11 @@ def release(self, user, *, trigger_sync: bool = True): # any triggers associated with our channel get archived and released for trigger in self.triggers.filter(is_active=True): - trigger.archive(user) + try: + trigger.archive(user) + except Exception as e: + logger.error(f"Unable to deactivate a channel trigger: {str(e)}", exc_info=True) + trigger.release(user) # any open incidents are ended @@ -678,52 +670,23 @@ def trigger_sync(self): # pragma: no cover """ assert self.is_android, "can only trigger syncs on Android channels" + mailroom.get_client().android_sync(self) - from .tasks import sync_channel_fcm_task - - # androids sync via FCM - fcm_id = self.config.get(Channel.CONFIG_FCM_ID) - - if fcm_id and settings.ANDROID_FCM_PROJECT_ID and settings.ANDROID_CREDENTIALS_FILE: - on_transaction_commit(lambda: sync_channel_fcm_task.delay(fcm_id, channel_id=self.id)) - - @classmethod - def replace_variables(cls, text, variables, content_type=CONTENT_TYPE_URLENCODED): - for key in variables.keys(): - replacement = str(variables[key]) - - # encode based on our content type - if content_type == Channel.CONTENT_TYPE_URLENCODED: - replacement = quote_plus(replacement) - - # if this is JSON, need to wrap in quotes (and escape them) - elif content_type == Channel.CONTENT_TYPE_JSON: - replacement = json.dumps(replacement) - - # XML needs to be escaped - elif 
content_type == Channel.CONTENT_TYPE_XML:
-                replacement = escape(replacement)
-
-            text = text.replace("{{%s}}" % key, replacement)
-
-        return text
-
-    def get_count(self, count_types):
-        count = (
-            ChannelCount.objects.filter(channel=self, count_type__in=count_types)
-            .aggregate(Sum("count"))
-            .get("count__sum", 0)
-        )
+    def get_count(self, count_types, since=None):
+        qs = ChannelCount.objects.filter(channel=self, count_type__in=count_types)
+        if since:
+            qs = qs.filter(day__gte=since)
+        count = qs.aggregate(Sum("count")).get("count__sum", 0)

         return 0 if count is None else count

-    def get_msg_count(self):
-        return self.get_count([ChannelCount.INCOMING_MSG_TYPE, ChannelCount.OUTGOING_MSG_TYPE])
+    def get_msg_count(self, since=None):
+        return self.get_count([ChannelCount.INCOMING_MSG_TYPE, ChannelCount.OUTGOING_MSG_TYPE], since)

-    def get_ivr_count(self):
-        return self.get_count([ChannelCount.INCOMING_IVR_TYPE, ChannelCount.OUTGOING_IVR_TYPE])
+    def get_ivr_count(self, since=None):
+        return self.get_count([ChannelCount.INCOMING_IVR_TYPE, ChannelCount.OUTGOING_IVR_TYPE], since)

-    def get_log_count(self):
-        return self.get_count([ChannelCount.SUCCESS_LOG_TYPE, ChannelCount.ERROR_LOG_TYPE])
+    def get_log_count(self, since=None):
+        return self.get_count([ChannelCount.SUCCESS_LOG_TYPE, ChannelCount.ERROR_LOG_TYPE], since)

     class Meta:
@@ -738,7 +701,7 @@ class Meta:
     ]


-class ChannelCount(SquashableModel):
+class ChannelCount(BaseSquashableCount):
     """
     This model is maintained by Postgres triggers and maintains the daily counts of messages and ivr interactions
     on each day. This allows for fast visualizations of activity on the channel read page as well as summaries
@@ -768,16 +731,14 @@ class ChannelCount(SquashableModel):
     channel = models.ForeignKey(Channel, on_delete=models.PROTECT, related_name="counts")
     count_type = models.CharField(choices=COUNT_TYPE_CHOICES, max_length=2)
     day = models.DateField(null=True)
-    count = models.IntegerField(default=0)

     @classmethod
     def get_day_count(cls, channel, count_type, day):
-        counts = cls.objects.filter(channel=channel, count_type=count_type, day=day).order_by("day", "count_type")
-        return cls.sum(counts)
+        return cls.objects.filter(channel=channel, count_type=count_type, day=day).order_by("day", "count_type").sum()

     @classmethod
-    def get_squash_query(cls, distinct_set):
-        if distinct_set.day:
+    def get_squash_query(cls, distinct_set: dict) -> tuple:
+        if distinct_set["day"]:
             sql = """
            WITH removed as (
                DELETE FROM %(table)s WHERE "channel_id" = %%s AND "count_type" = %%s AND "day" = %%s RETURNING "count"
@@ -788,7 +749,7 @@ def get_squash_query(cls, distinct_set):
                "table": cls._meta.db_table
            }

-            params = (distinct_set.channel_id, distinct_set.count_type, distinct_set.day) * 2
+            params = (distinct_set["channel_id"], distinct_set["count_type"], distinct_set["day"]) * 2
         else:
             sql = """
            WITH removed as (
@@ -800,7 +761,7 @@ def get_squash_query(cls, distinct_set):
                "table": cls._meta.db_table
            }

-            params = (distinct_set.channel_id, distinct_set.count_type) * 2
+            params = (distinct_set["channel_id"], distinct_set["count_type"]) * 2

         return sql, params

@@ -874,6 +835,7 @@ class ChannelLog(models.Model):
     """
     A log of an interaction with a channel
     """
+    DYNAMO_TABLE = "ChannelLogs"  # unprefixed table name
     REDACT_MASK = "*" * 8  # used to mask redacted values

     LOG_TYPE_UNKNOWN = "unknown"
@@ -920,91 +882,102 @@ class ChannelLog(models.Model):
     elapsed_ms = models.IntegerField(default=0)
     created_on = models.DateTimeField(default=timezone.now)

+    @classmethod
+    def get_by_uuid(cls, channel, uuids: list) -> list:
+        """
+        Gets logs from DynamoDB and converts them to non-persistent instances of this class
+        """
+        if not uuids:
+            return []
+
+        client = dynamo.get_client()
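+        # note: DynamoDB's BatchGetItem accepts at most 100 keys per request,
+        # hence the chunking below with itertools.batched (Python 3.12+); keys
+        # the service returns under "UnprocessedKeys" are not retried here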
+ logs = [] + + for uuid_batch in itertools.batched(uuids, 100): + resp = client.batch_get_item( + RequestItems={ + dynamo.table_name(cls.DYNAMO_TABLE): {"Keys": [{"UUID": {"S": str(u)}} for u in uuid_batch]} + } + ) + + for log in resp["Responses"][dynamo.table_name(cls.DYNAMO_TABLE)]: + data = dynamo.load_jsongz(log["DataGZ"]["B"]) + logs.append( + ChannelLog( + uuid=log["UUID"]["S"], + channel=channel, + log_type=log["Type"]["S"], + http_logs=data["http_logs"], + errors=data["errors"], + elapsed_ms=int(log["ElapsedMS"]["N"]), + created_on=datetime.fromtimestamp(int(log["CreatedOn"]["N"]), tz=tzone.utc), + ) + ) + + return sorted(logs, key=lambda l: l.uuid) + def get_display(self, *, anonymize: bool, urn) -> dict: - return self.display(self._get_json(), anonymize=anonymize, channel=self.channel, urn=urn) + """ + Gets a dict representation of this log for display that is optionally anonymized + """ - @classmethod - def display(cls, data: dict, *, anonymize: bool, channel, urn) -> dict: # add reference URLs to errors - for err in data["errors"]: + errors = [e.copy() for e in self.errors or []] + for err in errors: ext_code = err.get("ext_code") - err["ref_url"] = channel.type.get_error_ref_url(channel, ext_code) if ext_code else None + err["ref_url"] = self.channel.type.get_error_ref_url(self.channel, ext_code) if ext_code else None + + data = { + "uuid": str(self.uuid), + "type": self.log_type, + "http_logs": [h.copy() for h in self.http_logs or []], + "errors": errors, + "elapsed_ms": self.elapsed_ms, + "created_on": self.created_on.isoformat(), + } if anonymize: - cls._anonymize(data, channel, urn) + self._anonymize(data, urn) # out of an abundance of caution, check that we're not returning one of our own credential values for log in data["http_logs"]: - for secret in channel.type.get_redact_values(channel): - assert secret not in log["url"] and secret not in log["request"] and secret not in log["response"] + for secret in self.channel.type.get_redact_values(self.channel): + assert ( + secret not in log["url"] and secret not in log["request"] and secret not in log.get("response", "") + ) return data - @classmethod - def _anonymize_value(cls, original: str, urn, redact_keys=()) -> str: + def _anonymize(self, data: dict, urn): + request_keys = self.channel.type.redact_request_keys + response_keys = self.channel.type.redact_response_keys + + for http_log in data["http_logs"]: + http_log["url"] = self._anonymize_value(http_log["url"], urn) + http_log["request"] = self._anonymize_value(http_log["request"], urn, redact_keys=request_keys) + http_log["response"] = self._anonymize_value(http_log.get("response", ""), urn, redact_keys=response_keys) + + for err in data["errors"]: + err["message"] = self._anonymize_value(err["message"], urn) + + def _anonymize_value(self, original: str, urn, redact_keys=()) -> str: # if log doesn't have an associated URN then we don't know what to anonymize, so redact completely if not original: return "" if not urn: - return original[:10] + cls.REDACT_MASK + return original[:10] + self.REDACT_MASK if redact_keys: - redacted = redact.http_trace(original, urn.path, cls.REDACT_MASK, redact_keys) + redacted = redact.http_trace(original, urn.path, self.REDACT_MASK, redact_keys) else: - redacted = redact.text(original, urn.path, cls.REDACT_MASK) + redacted = redact.text(original, urn.path, self.REDACT_MASK) # if nothing was redacted, don't risk returning sensitive information we didn't find if original == redacted and original: - return original[:10] + cls.REDACT_MASK + 
return original[:10] + self.REDACT_MASK return redacted - @classmethod - def _anonymize(cls, data: dict, channel, urn): - request_keys = channel.type.redact_request_keys - response_keys = channel.type.redact_response_keys - - for http_log in data["http_logs"]: - http_log["url"] = cls._anonymize_value(http_log["url"], urn) - http_log["request"] = cls._anonymize_value(http_log["request"], urn, redact_keys=request_keys) - http_log["response"] = cls._anonymize_value(http_log.get("response", ""), urn, redact_keys=response_keys) - - for err in data["errors"]: - err["message"] = cls._anonymize_value(err["message"], urn) - - @classmethod - def get_logs(cls, channel, uuids: list) -> list: - # look for logs in the database - logs = {l.uuid: l._get_json() for l in cls.objects.filter(channel=channel, uuid__in=uuids)} - - # and in storage - for log_uuid in uuids: - assert is_uuid(log_uuid), f"{log_uuid} is not a valid log UUID" - - if log_uuid not in logs: - key = f"channels/{channel.uuid}/{str(log_uuid)[0:4]}/{log_uuid}.json" - try: - log_file = storages["logs"].open(key) - logs[log_uuid] = json.loads(log_file.read()) - log_file.close() - except Exception: - logger.exception("unable to read log from storage", extra={"key": key}) - - return sorted(logs.values(), key=lambda l: l["created_on"]) - - def _get_json(self): - """ - Get a database instance in the same JSON format we write to S3 - """ - return { - "uuid": str(self.uuid), - "type": self.log_type, - "http_logs": [h.copy() for h in self.http_logs or []], - "errors": [e.copy() for e in self.errors or []], - "elapsed_ms": self.elapsed_ms, - "created_on": self.created_on.isoformat(), - } - class Meta: indexes = [models.Index(name="channellogs_by_channel", fields=("channel", "-created_on"))] diff --git a/temba/channels/tasks.py b/temba/channels/tasks.py index af29b521291..d8ceaea7aee 100644 --- a/temba/channels/tasks.py +++ b/temba/channels/tasks.py @@ -13,19 +13,12 @@ from temba.utils.crons import cron_task from temba.utils.models import delete_in_batches -from .android import sync from .models import Channel, ChannelCount, ChannelEvent, ChannelLog, SyncEvent from .types.android import AndroidType logger = logging.getLogger(__name__) -@shared_task -def sync_channel_fcm_task(cloud_registration_id, channel_id=None): # pragma: no cover - channel = Channel.objects.filter(id=channel_id).first() - sync.sync_channel_fcm(cloud_registration_id, channel) - - @cron_task() def check_android_channels(): from temba.notifications.incidents.builtin import ChannelDisconnectedIncidentType diff --git a/temba/channels/tests.py b/temba/channels/tests.py index 049201b8513..c88d2ec3175 100644 --- a/temba/channels/tests.py +++ b/temba/channels/tests.py @@ -1,7 +1,6 @@ import base64 import hashlib import hmac -import io import time from datetime import date, datetime, timedelta, timezone as tzone from unittest.mock import patch @@ -12,7 +11,6 @@ from django.conf import settings from django.contrib.auth.models import Group from django.core import mail -from django.core.files.storage import storages from django.test.utils import override_settings from django.urls import reverse from django.utils import timezone @@ -32,7 +30,7 @@ from temba.triggers.models import Trigger from temba.utils import json from temba.utils.models import generate_uuid -from temba.utils.views import TEMBA_MENU_SELECTION +from temba.utils.views.mixins import TEMBA_MENU_SELECTION from .models import Channel, ChannelCount, ChannelEvent, ChannelLog, SyncEvent from .tasks import ( @@ -54,7 +52,9 @@ def 
setUp(self): self.tel_channel = self.create_channel( "A", "Test Channel", "+250785551212", country="RW", secret="12345", config={"FCM_ID": "123"} ) - self.twitter_channel = self.create_channel("TWT", "Twitter Channel", "billy_bob") + self.facebook_channel = self.create_channel( + "FBA", "Facebook Channel", "12345", config={Channel.CONFIG_PAGE_NAME: "Test page"} + ) self.unclaimed_channel = self.create_channel("NX", "Unclaimed Channel", "", config={"FCM_ID": "000"}) self.unclaimed_channel.org = None @@ -100,7 +100,7 @@ def test_get_address_display(self): self.assertEqual("+250 785 551 212", self.tel_channel.get_address_display()) self.assertEqual("+250785551212", self.tel_channel.get_address_display(e164=True)) - self.assertEqual("@billy_bob", self.twitter_channel.get_address_display()) + self.assertEqual("Test page (12345)", self.facebook_channel.get_address_display()) # make sure it works with alphanumeric numbers self.tel_channel.address = "EATRIGHT" @@ -178,7 +178,7 @@ def test_release(self, mr_mocks): channel2 = Channel.create(self.org, self.user, "", "T", "Test Channel", "0785553333") # add channel trigger - flow = self.get_flow("color") + flow = self.create_flow("Test") Trigger.create(self.org, self.admin, Trigger.TYPE_CATCH_ALL, flow, channel=channel1) # create some activity on this channel @@ -267,6 +267,34 @@ def test_release(self, mr_mocks): self.assertFalse(Channel.objects.filter(id=channel1.id).exists()) + @mock_mailroom + def test_release_facebook(self, mr_mocks): + channel = Channel.create( + self.org, + self.admin, + None, + "FBA", + name="Facebook", + address="12345", + role="SR", + schemes=["facebook"], + config={"auth_token": "09876543"}, + ) + + flow = self.create_flow("Test") + with patch("requests.post") as mock_post: + mock_post.return_value = MockResponse(200, json.dumps({"success": True})) + Trigger.create(self.org, self.admin, Trigger.TYPE_NEW_CONVERSATION, flow, channel=channel) + self.assertEqual(1, channel.triggers.filter(is_active=True).count()) + + with patch("requests.delete") as mock_delete: + mock_delete.return_value = MockResponse(400, "error") + + channel.release(self.admin) + self.assertEqual(0, channel.triggers.filter(is_active=True).count()) + self.assertEqual(1, channel.triggers.filter(is_active=False).count()) + self.assertFalse(channel.is_active) + @mock_mailroom def test_release_android(self, mr_mocks): android = self.claim_new_android() @@ -325,7 +353,7 @@ def test_chart(self): self.create_incoming_msg(joe, "This incoming message will be counted", channel=self.tel_channel) self.create_outgoing_msg(joe, "This outgoing message will be counted", channel=self.tel_channel) - response = self.fetch_protected(chart_url, self.admin) + response = self.requestView(chart_url, self.admin) chart = response.json() # an entry for each incoming and outgoing @@ -370,11 +398,11 @@ def test_read(self): self.assertEqual(f"/settings/channels/{self.tel_channel.uuid}", response.headers[TEMBA_MENU_SELECTION]) # org users can - response = self.fetch_protected(tel_channel_read_url, self.user) + response = self.requestView(tel_channel_read_url, self.user) self.assertTrue(len(response.context["latest_sync_events"]) <= 5) - response = self.fetch_protected(tel_channel_read_url, self.admin) + response = self.requestView(tel_channel_read_url, self.admin) self.assertContains(response, self.tel_channel.name) test_date = datetime(2020, 1, 20, 0, 0, 0, 0, tzone.utc) @@ -395,7 +423,7 @@ def test_read(self): self.create_outgoing_msg(bob, "delayed message", status=Msg.STATUS_QUEUED, 
channel=self.tel_channel) with patch("django.utils.timezone.now", return_value=test_date): - response = self.fetch_protected(tel_channel_read_url, self.admin) + response = self.requestView(tel_channel_read_url, self.admin) self.assertIn("delayed_sync_event", response.context_data.keys()) self.assertIn("unsent_msgs_count", response.context_data.keys()) @@ -415,7 +443,7 @@ def test_read(self): self.create_outgoing_msg(joe, "This outgoing message will be counted", channel=self.tel_channel) # now we have an inbound message and two outbounds - response = self.fetch_protected(tel_channel_read_url, self.admin) + response = self.requestView(tel_channel_read_url, self.admin) self.assertEqual(200, response.status_code) # message stats table have an inbound and two outbounds in the last month @@ -432,7 +460,7 @@ def test_read(self): # now let's create an ivr interaction self.create_incoming_msg(joe, "incoming ivr", channel=self.tel_channel, voice=True) self.create_outgoing_msg(joe, "outgoing ivr", channel=self.tel_channel, voice=True) - response = self.fetch_protected(tel_channel_read_url, self.admin) + response = self.requestView(tel_channel_read_url, self.admin) self.assertEqual(1, len(response.context["message_stats_table"])) self.assertEqual(1, response.context["message_stats_table"][0]["incoming_messages_count"]) @@ -442,7 +470,7 @@ def test_read(self): # look at the chart for our messages chart_url = reverse("channels.channel_chart", args=[self.tel_channel.uuid]) - response = self.fetch_protected(chart_url, self.admin) + response = self.requestView(chart_url, self.admin) # incoming, outgoing for both text and our ivr messages self.assertEqual(4, len(response.json()["series"])) @@ -974,8 +1002,7 @@ def test_configuration(self): config_url = reverse("channels.channel_configuration", args=[self.ex_channel.uuid]) # can't view configuration if not logged in - response = self.client.get(config_url) - self.assertLoginRedirect(response) + self.assertRequestDisallowed(config_url, [None, self.agent]) self.login(self.admin) @@ -989,7 +1016,7 @@ def test_configuration(self): # can't view configuration of channel in other org response = self.client.get(reverse("channels.channel_configuration", args=[self.other_org_channel.uuid])) - self.assertLoginRedirect(response) + self.assertEqual(response.status_code, 404) def test_update(self): android_channel = self.create_channel( @@ -1063,7 +1090,7 @@ def test_delete(self): response = self.assertDeleteSubmit( delete_url, self.admin, object_deactivated=self.ex_channel, success_status=200 ) - self.assertEqual("/org/workspace/", response["Temba-Success"]) + self.assertEqual("/org/workspace/", response["X-Temba-Success"]) # reactivate self.ex_channel.is_active = True @@ -1340,6 +1367,29 @@ def test_trim_task(self): class ChannelLogTest(TembaTest): + def test_get_by_uuid(self): + log1 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, + http_logs=[{"url": "https://foo.bar/send1"}], + errors=[{"code": "bad_response", "message": "response not right"}], + ) + log2 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_STATUS, + http_logs=[{"url": "https://foo.bar/send2"}], + errors=[], + ) + + self.assertEqual([], ChannelLog.get_by_uuid(self.channel, [])) + + logs = ChannelLog.get_by_uuid(self.channel, [log1.uuid, log2.uuid]) + self.assertEqual(2, len(logs)) + self.assertEqual(log1.uuid, logs[0].uuid) + self.assertEqual(self.channel, logs[0].channel) + self.assertEqual(ChannelLog.LOG_TYPE_MSG_SEND, logs[0].log_type) + self.assertEqual(log2.uuid, logs[1].uuid) + 
self.assertEqual(self.channel, logs[1].channel) + self.assertEqual(ChannelLog.LOG_TYPE_MSG_STATUS, logs[1].log_type) + def test_get_display(self): channel = self.create_channel("TG", "Telegram", "mybot") contact = self.create_contact("Fred Jones", urns=["telegram:74747474"]) @@ -1430,16 +1480,25 @@ def test_get_display(self): ) def test_get_display_timed_out(self): - channel = self.create_channel("TG", "Telegram", "mybot") - contact = self.create_contact("Fred Jones", urns=["telegram:74747474"]) + channel = self.create_channel( + "D3C", + "360Dialog channel", + address="1234", + country="BR", + config={ + Channel.CONFIG_BASE_URL: "https://waba-v2.360dialog.io", + Channel.CONFIG_AUTH_TOKEN: "123456789", + }, + ) + contact = self.create_contact("Bob", urns=["whatsapp:75757575"]) log = ChannelLog.objects.create( channel=channel, log_type=ChannelLog.LOG_TYPE_MSG_SEND, is_error=True, http_logs=[ { - "url": "https://telegram.com/send?to=74747474", - "request": 'POST https://telegram.com/send?to=74747474 HTTP/1.1\r\n\r\n{"to":"74747474"}', + "url": "https://waba-v2.360dialog.io/send?to=75757575", + "request": 'POST https://waba-v2.360dialog.io/send?to=75757575 HTTP/1.1\r\n\r\n{"to":"75757575"}', "elapsed_ms": 30001, "retries": 0, "created_on": "2022-08-17T14:07:30Z", @@ -1455,8 +1514,8 @@ def test_get_display_timed_out(self): "type": "msg_send", "http_logs": [ { - "url": "https://telegram.com/send?to=74747474", - "request": 'POST https://telegram.com/send?to=74747474 HTTP/1.1\r\n\r\n{"to":"74747474"}', + "url": "https://waba-v2.360dialog.io/send?to=75757575", + "request": 'POST https://waba-v2.360dialog.io/send?to=75757575 HTTP/1.1\r\n\r\n{"to":"75757575"}', "elapsed_ms": 30001, "retries": 0, "created_on": "2022-08-17T14:07:30Z", @@ -1468,14 +1527,15 @@ def test_get_display_timed_out(self): }, log.get_display(anonymize=False, urn=msg_out.contact_urn), ) + self.assertEqual( { "uuid": str(log.uuid), "type": "msg_send", "http_logs": [ { - "url": "https://telegram.com/send?to=********", - "request": 'POST https://telegram.com/send?to=******** HTTP/1.1\r\n\r\n{"to":"********"}', + "url": "https://waba-v2.360dialog.io/send?to=********", + "request": 'POST https://waba-v2.360dialog.io/send?to=******** HTTP/1.1\r\n\r\n{"to":"********"}', "response": "", "elapsed_ms": 30001, "retries": 0, @@ -1519,10 +1579,8 @@ class ChannelLogCRUDLTest(CRUDLTestMixin, TembaTest): def test_msg(self): contact = self.create_contact("Fred", phone="+12067799191") - log1 = ChannelLog.objects.create( - channel=self.channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log1 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://foo.bar/send1", @@ -1531,15 +1589,12 @@ def test_msg(self): "response": "HTTP/1.0 200 OK\r\r\r\n", "elapsed_ms": 12, "retries": 0, - "created_on": "2022-01-01T00:00:00Z", + "created_on": "2024-09-16T00:00:00Z", } ], - errors=[], ) - log2 = ChannelLog.objects.create( - channel=self.channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log2 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://foo.bar/send2", @@ -1548,29 +1603,26 @@ def test_msg(self): "response": "HTTP/1.0 200 OK\r\r\r\n", "elapsed_ms": 12, "retries": 0, - "created_on": "2022-01-01T00:00:00Z", + "created_on": "2024-09-16T00:00:00Z", } ], - errors=[], ) msg1 = self.create_outgoing_msg(contact, "success message", status="D", logs=[log1, log2]) # create another msg and log that shouldn't be included - log3 = ChannelLog.objects.create( - 
channel=self.channel, - is_error=False, + log3 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://foo.bar/send3", "status_code": 200, - "request": "POST https://foo.bar/send2\r\n\r\n{}", + "request": "POST https://foo.bar/send3\r\n\r\n{}", "response": "HTTP/1.0 200 OK\r\r\r\n", "elapsed_ms": 12, "retries": 0, - "created_on": "2022-01-01T00:00:00Z", + "created_on": "2024-09-16T00:00:00Z", } ], - errors=[], ) self.create_outgoing_msg(contact, "success message", status="D", logs=[log3]) @@ -1585,39 +1637,26 @@ def test_msg(self): response = self.client.get(msg1_url) self.assertEqual(f"/settings/channels/{self.channel.uuid}", response.headers[TEMBA_MENU_SELECTION]) - # try when log objects are in storage rather than the database - ChannelLog.objects.all().delete() - - storages["logs"].save( - f"channels/{self.channel.uuid}/{str(log1.uuid)[:4]}/{log1.uuid}.json", - io.StringIO(json.dumps(log1._get_json())), - ) - storages["logs"].save( - f"channels/{self.channel.uuid}/{str(log2.uuid)[:4]}/{log2.uuid}.json", - io.StringIO(json.dumps(log2._get_json())), - ) - - response = self.assertListFetch(msg1_url, [self.admin], context_objects=[]) - self.assertEqual(2, len(response.context["logs"])) - self.assertEqual("https://foo.bar/send1", response.context["logs"][0]["http_logs"][0]["url"]) - self.assertEqual("https://foo.bar/send2", response.context["logs"][1]["http_logs"][0]["url"]) - - # missing logs are logged as errors and ignored - storages["logs"].delete(f"channels/{self.channel.uuid}/{str(log2.uuid)[:4]}/{log2.uuid}.json") - - response = self.assertListFetch(msg1_url, [self.admin], context_objects=[]) - self.assertEqual(1, len(response.context["logs"])) - def test_call(self): contact = self.create_contact("Fred", phone="+12067799191") flow = self.create_flow("IVR") - call1 = self.create_incoming_call(flow, contact) - log1 = ChannelLog.objects.get() - log2 = ChannelLog.objects.create( - channel=self.channel, - log_type=ChannelLog.LOG_TYPE_IVR_START, - is_error=False, + log1 = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, + http_logs=[ + { + "url": "https://foo.bar/call1", + "status_code": 200, + "request": "POST https://foo.bar/send1\r\n\r\n{}", + "response": "HTTP/1.0 200 OK\r\r\r\n", + "elapsed_ms": 12, + "retries": 0, + "created_on": "2024-09-16T00:00:00Z", + } + ], + ) + log2 = self.create_channel_log( + ChannelLog.LOG_TYPE_IVR_START, http_logs=[ { "url": "https://foo.bar/call2", @@ -1629,20 +1668,32 @@ def test_call(self): "created_on": "2022-01-01T00:00:00Z", } ], - errors=[], ) - call1.log_uuids = [log1.uuid, log2.uuid] - call1.save(update_fields=("log_uuids",)) + call1 = self.create_incoming_call(flow, contact, logs=[log1, log2]) # create another call and log that shouldn't be included - self.create_incoming_call(flow, contact) + log3 = self.create_channel_log( + ChannelLog.LOG_TYPE_IVR_START, + http_logs=[ + { + "url": "https://foo.bar/call2", + "status_code": 200, + "request": "POST /send2\r\n\r\n{}", + "response": "HTTP/1.0 200 OK\r\r\r\n", + "elapsed_ms": 12, + "retries": 0, + "created_on": "2022-01-01T00:00:00Z", + } + ], + ) + self.create_incoming_call(flow, contact, logs=[log3]) call1_url = reverse("channels.channellog_call", args=[self.channel.uuid, call1.id]) self.assertRequestDisallowed(call1_url, [None, self.user, self.editor, self.agent, self.admin2]) response = self.assertListFetch(call1_url, [self.admin], context_objects=[]) self.assertEqual(2, len(response.context["logs"])) - self.assertEqual("https://acme-calls.com/reply", 
response.context["logs"][0]["http_logs"][0]["url"]) + self.assertEqual("https://foo.bar/call1", response.context["logs"][0]["http_logs"][0]["url"]) self.assertEqual("https://foo.bar/call2", response.context["logs"][1]["http_logs"][0]["url"]) def test_read_and_list(self): @@ -1741,10 +1792,8 @@ def test_redaction_for_telegram(self): urn = "telegram:3527065" contact = self.create_contact("Fred Jones", urns=[urn]) channel = self.create_channel("TG", "Test TG Channel", "234567") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://api.telegram.org/65474/sendMessage", @@ -1787,10 +1836,8 @@ def test_redaction_for_telegram_with_invalid_json(self): urn = "telegram:3527065" contact = self.create_contact("Fred Jones", urns=[urn]) channel = self.create_channel("TG", "Test TG Channel", "234567") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://api.telegram.org/65474/sendMessage", @@ -1822,10 +1869,8 @@ def test_redaction_for_telegram_when_no_match(self): urn = "telegram:3527065" contact = self.create_contact("Fred Jones", urns=[urn]) channel = self.create_channel("TG", "Test TG Channel", "234567") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://api.telegram.org/There is no contact identifying information", @@ -1853,84 +1898,12 @@ def test_redaction_for_telegram_when_no_match(self): response = self.client.get(read_url) self.assertRedacted(response, ("3527065", "api.telegram.org", "/65474/sendMessage")) - def test_redaction_for_twitter(self): - urn = "twitterid:767659860" - contact = self.create_contact("Fred Jones", urns=[urn]) - channel = self.create_channel("TWT", "Test TWT Channel", "nyaruka") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_RECEIVE, - is_error=False, - http_logs=[ - { - "url": "https://textit.in/c/twt/5c70a767-f3dc-4a99-9323-4774f6432af5/receive", - "status_code": 200, - "request": 'POST /c/twt/5c70a767-f3dc-4a99-9323-4774f6432af5/receive HTTP/1.1\r\nHost: textit.in\r\nContent-Length: 1596\r\nContent-Type: application/json\r\nFinagle-Ctx-Com.twitter.finagle.deadline: 1560853608671000000 1560853611615000000\r\nFinagle-Ctx-Com.twitter.finagle.retries: 0\r\nFinagle-Http-Retryable-Request: \r\nX-Amzn-Trace-Id: Root=1-5d08bc68-de52174e83904d614a32a5c6\r\nX-B3-Flags: 2\r\nX-B3-Parentspanid: fe22fff79af84311\r\nX-B3-Sampled: false\r\nX-B3-Spanid: 86f3c3871ae31c2d\r\nX-B3-Traceid: fe22fff79af84311\r\nX-Forwarded-For: 199.16.157.173\r\nX-Forwarded-Port: 443\r\nX-Forwarded-Proto: https\r\nX-Twitter-Webhooks-Signature: sha256=CYVI5q7e7bzKufCD3GnZoJheSmjVRmNQo9uzO/gi4tA=\r\n\r\n{"for_user_id":"3753944237","direct_message_events":[{"type":"message_create","id":"1140928844112814089","created_timestamp":"1560853608526","message_create":{"target":{"recipient_id":"3753944237"},"sender_id":"767659860","message_data":{"text":"Briefly what will you be talking about and do you have any feature stories","entities":{"hashtags":[],"symbols":[],"user_mentions":[],"urls":[]}}}}],"users":{"767659860":{"id":"767659860","created_timestamp":"1345386861000","name":"Aaron 
Tumukunde","screen_name":"tumaaron","description":"Mathematics \u25a1 Media \u25a1 Real Estate \u25a1 And Jesus above all.","protected":false,"verified":false,"followers_count":167,"friends_count":485,"statuses_count":237,"profile_image_url":"http://pbs.twimg.com/profile_images/860380640029573120/HKuXgxR__normal.jpg","profile_image_url_https":"https://pbs.twimg.com/profile_images/860380640029573120/HKuXgxR__normal.jpg"},"3753944237":{"id":"3753944237","created_timestamp":"1443048916258","name":"Teheca","screen_name":"tehecaug","location":"Uganda","description":"We connect new mothers & parents to nurses for postnatal care. #Google LaunchPad Africa 2018, #UNFPA UpAccelerate 2017 #MasterCard Innovation exp 2017 #YCSUS18","url":"https://t.co/i0hcLRwEj7","protected":false,"verified":false,"followers_count":3369,"friends_count":4872,"statuses_count":1128,"profile_image_url":"http://pbs.twimg.com/profile_images/694638274204143616/Q4Mbg1tO_normal.png","profile_image_url_https":"https://pbs.twimg.com/profile_images/694638274204143616/Q4Mbg1tO_normal.png"}}}', - "response": 'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\n{"message":"Message Accepted","data":[{"type":"msg","channel_uuid":"5c70a767-f3dc-4a99-9323-4774f6432af5","msg_uuid":"6c26277d-7002-4489-9b7f-998d4be5d0db","text":"Briefly what will you be talking about and do you have any feature stories","urn":"twitterid:767659860#tumaaron","external_id":"1140928844112814089","received_on":"2019-06-18T10:26:48.526Z"}]}', - "elapsed_ms": 12, - "retries": 0, - "created_on": "2022-01-01T00:00:00Z", - } - ], - ) - msg = self.create_incoming_msg(contact, "incoming msg", channel=channel, logs=[log]) - - self.login(self.admin) - - read_url = reverse("channels.channellog_msg", args=[channel.uuid, msg.id]) - - # check read page shows un-redacted content for a regular org - response = self.client.get(read_url) - self.assertNotRedacted(response, ("767659860", "Aaron Tumukunde", "tumaaron")) - - # but for anon org we see redaction... - with self.anonymous(self.org): - response = self.client.get(read_url) - self.assertRedacted(response, ("767659860", "Aaron Tumukunde", "tumaaron")) - - def test_redaction_for_twitter_when_no_match(self): - urn = "twitterid:767659860" - contact = self.create_contact("Fred Jones", urns=[urn]) - channel = self.create_channel("TWT", "Test TWT Channel", "nyaruka") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, - http_logs=[ - { - "url": "https://twitter.com/There is no contact identifying information", - "status_code": 200, - "request": 'POST /65474/sendMessage HTTP/1.1\r\nHost: api.telegram.org\r\nUser-Agent: Courier/1.2.159\r\nContent-Length: 231\r\nContent-Type: application/x-www-form-urlencoded\r\nAccept-Encoding: gzip\r\n\r\n{"json": "There is no contact identifying information"}', - "response": 'HTTP/1.1 200 OK\r\nContent-Length: 298\r\nContent-Type: application/json\r\n\r\n{"json": "There is no contact identifying information"}', - "elapsed_ms": 12, - "retries": 0, - "created_on": "2022-01-01T00:00:00Z", - } - ], - ) - msg = self.create_incoming_msg(contact, "incoming msg", channel=channel, logs=[log]) - - self.login(self.admin) - - read_url = reverse("channels.channellog_msg", args=[channel.uuid, msg.id]) - - # check read page shows un-redacted content for a regular org - response = self.client.get(read_url) - self.assertNotRedacted(response, ("767659860",)) - - # but for anon org we see complete redaction... 
- with self.anonymous(self.org): - response = self.client.get(read_url) - self.assertRedacted(response, ("767659860", "twitter.com", "/65474/sendMessage")) - def test_redaction_for_facebook(self): urn = "facebook:2150393045080607" contact = self.create_contact("Fred Jones", urns=[urn]) channel = self.create_channel("FB", "Test FB Channel", "54764868534") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_RECEIVE, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": f"https://textit.in/c/fb/{channel.uuid}/receive", @@ -1965,10 +1938,8 @@ def test_redaction_for_facebook_when_no_match(self): urn = "facebook:2150393045080607" contact = self.create_contact("Fred Jones", urns=[urn]) channel = self.create_channel("FB", "Test FB Channel", "54764868534") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_SEND, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://facebook.com/There is no contact identifying information", @@ -1999,10 +1970,8 @@ def test_redaction_for_facebook_when_no_match(self): def test_redaction_for_twilio(self): contact = self.create_contact("Fred Jones", phone="+593979099111") channel = self.create_channel("T", "Test Twilio Channel", "+12345") - log = ChannelLog.objects.create( - channel=channel, - log_type=ChannelLog.LOG_TYPE_MSG_STATUS, - is_error=False, + log = self.create_channel_log( + ChannelLog.LOG_TYPE_MSG_SEND, http_logs=[ { "url": "https://textit.in/c/t/1234-5678/status?id=2466753&action=callback", diff --git a/temba/channels/types/android/tests.py b/temba/channels/types/android/tests.py index 9e0c56ad121..eb28d3b28cb 100644 --- a/temba/channels/types/android/tests.py +++ b/temba/channels/types/android/tests.py @@ -5,13 +5,16 @@ from temba.contacts.models import URN from temba.orgs.models import Org from temba.tests import CRUDLTestMixin, TembaTest +from temba.tests.mailroom import mock_mailroom from temba.utils import get_anonymous_user from ...models import Channel class AndroidTypeTest(TembaTest, CRUDLTestMixin): - def test_claim(self): + + @mock_mailroom + def test_claim(self, mr_mocks): # remove our explicit country so it needs to be derived from channels self.org.country = None self.org.timezone = "UTC" diff --git a/temba/channels/types/bandwidth/type.py b/temba/channels/types/bandwidth/type.py index 7f8f275ad0b..9f77daf32f3 100644 --- a/temba/channels/types/bandwidth/type.py +++ b/temba/channels/types/bandwidth/type.py @@ -24,6 +24,7 @@ class BandwidthType(ChannelType): courier_url = r"^bw/(?P[a-z0-9\-]+)/(?Preceive|status)$" schemes = [URN.TEL_SCHEME] + async_activation = False claim_view = ClaimView claim_blurb = _("If you have an %(link)s number, you can quickly connect it using their APIs.") % { diff --git a/temba/channels/types/dialog360_legacy/tests.py b/temba/channels/types/dialog360_legacy/tests.py index 95decf7ff02..93b44fb0f73 100644 --- a/temba/channels/types/dialog360_legacy/tests.py +++ b/temba/channels/types/dialog360_legacy/tests.py @@ -142,12 +142,18 @@ def test_check_health(self): with patch("requests.get") as mock_get: mock_get.side_effect = [ MockResponse(200, '{"meta": {"api_status": "stable", "version": "2.35.4"}}'), - MockResponse(401, '{"meta": {"api_status": "stable", "version": "2.35.4"}}'), + MockResponse(401, ""), ] channel.type.check_health(channel) mock_get.assert_called_with( "https://example.com/whatsapp/v1/health", headers={"D360-API-KEY": 
"123456789", "Content-Type": "application/json"}, ) - with self.assertRaises(Exception): + + with patch("logging.Logger.debug") as mock_log_debug: channel.type.check_health(channel) + self.assertEqual(1, mock_log_debug.call_count) + self.assertEqual( + "Error checking API health: b''", + mock_log_debug.call_args[0][0], + ) diff --git a/temba/channels/types/dialog360_legacy/type.py b/temba/channels/types/dialog360_legacy/type.py index 5807e05cdc6..f5a0c841c6f 100644 --- a/temba/channels/types/dialog360_legacy/type.py +++ b/temba/channels/types/dialog360_legacy/type.py @@ -1,3 +1,5 @@ +import logging + import requests from django.forms import ValidationError @@ -13,6 +15,8 @@ from ...models import ChannelType, ConfigUI +logger = logging.getLogger(__name__) + class Dialog360LegacyType(ChannelType): """ @@ -67,12 +71,20 @@ def fetch_templates(self, channel) -> list: return response.json()["waba_templates"] def check_health(self, channel): + start = timezone.now() response = requests.get( channel.config[Channel.CONFIG_BASE_URL] + "/v1/health", headers=self.get_headers(channel) ) if response.status_code != 200: - raise requests.RequestException("Could not check api status", response=response) + HTTPLog.from_exception( + HTTPLog.WHATSAPP_CHECK_HEALTH, + requests.RequestException("Could not check api status", response=response), + start, + channel=channel, + ) + logger.debug(f"Error checking API health: {response.content}") + return return response diff --git a/temba/channels/types/external/tests.py b/temba/channels/types/external/tests.py index 4d49b30491e..4589d08b8d2 100644 --- a/temba/channels/types/external/tests.py +++ b/temba/channels/types/external/tests.py @@ -29,7 +29,7 @@ def test_claim(self, mock_socket_hostname): post_data["country"] = "RW" post_data["url"] = "http://localhost:8000/foo" post_data["method"] = "POST" - post_data["body"] = "send=true" + post_data["body"] = '{"from":"{{from_no_plus}}","to":{{to_no_plus}},"text":\'{{text}}\' }' post_data["content_type"] = Channel.CONTENT_TYPE_JSON post_data["max_length"] = 180 post_data["send_authorization"] = "Token 123" @@ -55,6 +55,12 @@ def test_claim(self, mock_socket_hostname): post_data["scheme"] = "tel" post_data["number"] = "12345" response = self.client.post(url, post_data) + self.assertFormError( + response.context["form"], "body", "Invalid JSON, make sure to remove quotes around variables" + ) + + post_data["body"] = '{"from":{{from_no_plus}},"to":{{to_no_plus}},"text":{{text}},"channel":{{channel}} }' + response = self.client.post(url, post_data) channel = Channel.objects.get() self.assertEqual(channel.country, "RW") @@ -67,7 +73,10 @@ def test_claim(self, mock_socket_hostname): self.assertEqual(channel.config[ExternalType.CONFIG_SEND_AUTHORIZATION], "Token 123") self.assertEqual(channel.channel_type, "EX") self.assertEqual(Channel.ENCODING_SMART, channel.config[Channel.CONFIG_ENCODING]) - self.assertEqual("send=true", channel.config[ExternalType.CONFIG_SEND_BODY]) + self.assertEqual( + '{"from":{{from_no_plus}},"to":{{to_no_plus}},"text":{{text}},"channel":{{channel}} }', + channel.config[ExternalType.CONFIG_SEND_BODY], + ) self.assertEqual("SENT", channel.config[ExternalType.CONFIG_MT_RESPONSE_CHECK]) config_url = reverse("channels.channel_configuration", args=[channel.uuid]) @@ -85,31 +94,33 @@ def test_claim(self, mock_socket_hostname): # test substitution in our url self.assertEqual( "http://example.com/send.php?from=5080&text=test&to=%2B250788383383", - channel.replace_variables(ext_url, {"from": "5080", "text": "test", 
"to": "+250788383383"}), + ExternalType.replace_variables(ext_url, {"from": "5080", "text": "test", "to": "+250788383383"}), ) # test substitution with unicode self.assertEqual( "http://example.com/send.php?from=5080&text=Reply+%E2%80%9C1%E2%80%9D+for+good&to=%2B250788383383", - channel.replace_variables(ext_url, {"from": "5080", "text": "Reply “1” for good", "to": "+250788383383"}), + ExternalType.replace_variables( + ext_url, {"from": "5080", "text": "Reply “1” for good", "to": "+250788383383"} + ), ) # test substitution with XML encoding body = "{{text}}" self.assertEqual( "Hello & World", - channel.replace_variables(body, {"text": "Hello & World"}, Channel.CONTENT_TYPE_XML), + ExternalType.replace_variables(body, {"text": "Hello & World"}, Channel.CONTENT_TYPE_XML), ) self.assertEqual( - "التوطين", channel.replace_variables(body, {"text": "التوطين"}, Channel.CONTENT_TYPE_XML) + "التوطين", ExternalType.replace_variables(body, {"text": "التوطين"}, Channel.CONTENT_TYPE_XML) ) # test substitution with JSON encoding body = "{ body: {{text}} }" self.assertEqual( '{ body: "this is \\"quote\\"" }', - channel.replace_variables(body, {"text": 'this is "quote"'}, Channel.CONTENT_TYPE_JSON), + ExternalType.replace_variables(body, {"text": 'this is "quote"'}, Channel.CONTENT_TYPE_JSON), ) # raw content type should be loaded on setting page as is @@ -131,9 +142,10 @@ def test_claim(self, mock_socket_hostname): post_data["scheme"] = "ext" post_data["address"] = "123456789" - post_data["url"] = "http://example.com/send.php?from={{from}}&text={{text}}&to={{to}}" - post_data["method"] = "GET" + post_data["url"] = "http://example.com/send.php" + post_data["method"] = "POST" post_data["content_type"] = Channel.CONTENT_TYPE_JSON + post_data["body"] = '{"from":{{from_no_plus}},"to":{{to_no_plus}},"text":{{text}} }' post_data["max_length"] = 180 post_data["encoding"] = Channel.ENCODING_SMART diff --git a/temba/channels/types/external/type.py b/temba/channels/types/external/type.py index 47839e3d2e5..731e377867f 100644 --- a/temba/channels/types/external/type.py +++ b/temba/channels/types/external/type.py @@ -1,3 +1,7 @@ +import json +from urllib.parse import quote_plus +from xml.sax.saxutils import escape + from django.utils.translation import gettext_lazy as _ from ...models import Channel, ChannelType, ConfigUI @@ -35,6 +39,27 @@ class ExternalType(ChannelType): "&channel={{channel}}" ) + @classmethod + def replace_variables(cls, text, variables, content_type=Channel.CONTENT_TYPE_URLENCODED): + for key in variables.keys(): + replacement = str(variables[key]) + + # encode based on our content type + if content_type == Channel.CONTENT_TYPE_URLENCODED: + replacement = quote_plus(replacement) + + # if this is JSON, need to wrap in quotes (and escape them) + elif content_type == Channel.CONTENT_TYPE_JSON: + replacement = json.dumps(replacement) + + # XML needs to be escaped + elif content_type == Channel.CONTENT_TYPE_XML: + replacement = escape(replacement) + + text = text.replace("{{%s}}" % key, replacement) + + return text + def get_config_ui_context(self, channel): context = super().get_config_ui_context(channel) @@ -55,8 +80,8 @@ def get_config_ui_context(self, channel): content_type = config.get(ExternalType.CONFIG_CONTENT_TYPE, Channel.CONTENT_TYPE_URLENCODED) context["example_content_type"] = "Content-Type: " + Channel.CONTENT_TYPES.get(content_type, content_type) - context["example_url"] = Channel.replace_variables(send_url, example_payload) - context["example_body"] = 
Channel.replace_variables(send_body, example_payload, content_type) + context["example_url"] = ExternalType.replace_variables(send_url, example_payload) + context["example_body"] = ExternalType.replace_variables(send_body, example_payload, content_type) quick_replies_payload = {} @@ -67,10 +92,10 @@ def get_config_ui_context(self, channel): else: quick_replies_payload["quick_replies"] = "&quick_reply=One&quick_reply=Two&quick_reply=Three" - context["example_url"] = Channel.replace_variables( + context["example_url"] = ExternalType.replace_variables( context["example_url"], quick_replies_payload, "don't encode" ) - context["example_body"] = Channel.replace_variables( + context["example_body"] = ExternalType.replace_variables( context["example_body"], quick_replies_payload, "don't encode" ) return context diff --git a/temba/channels/types/external/views.py b/temba/channels/types/external/views.py index 9031b6acd01..f4dc8f6430e 100644 --- a/temba/channels/types/external/views.py +++ b/temba/channels/types/external/views.py @@ -1,3 +1,5 @@ +import json + from smartmin.views import SmartFormView from django import forms @@ -93,6 +95,8 @@ class ClaimForm(ClaimViewMixin.Form): ) def clean(self): + from .type import ExternalType + cleaned_data = super().clean() scheme = cleaned_data.get("scheme") if scheme == URN.TEL_SCHEME and not cleaned_data.get("number"): @@ -100,6 +104,28 @@ def clean(self): elif scheme != URN.TEL_SCHEME and not cleaned_data.get("address"): raise ValidationError({"address": _("This field is required.")}) + content_type = cleaned_data.get("content_type") + + variables = { + "text": "", + "from": "", + "from_no_plus": "", + "to": "", + "to_no_plus": "", + "id": "", + "quick_replies": "", + "channel": "", + } + replaced_body = ExternalType.replace_variables( + cleaned_data.get("body"), variables, content_type=content_type + ) + if content_type == Channel.CONTENT_TYPE_JSON: + try: + + json.loads(replaced_body) + except json.decoder.JSONDecodeError: + raise ValidationError({"body": _("Invalid JSON, make sure to remove quotes around variables")}) + class SendClaimForm(ClaimViewMixin.Form): url = ExternalURLField( max_length=1024, diff --git a/temba/channels/types/facebookapp/tests.py b/temba/channels/types/facebookapp/tests.py index 35b010b160b..dc8a71d2875 100644 --- a/temba/channels/types/facebookapp/tests.py +++ b/temba/channels/types/facebookapp/tests.py @@ -340,6 +340,8 @@ def test_new_conversation_triggers(self): params={"access_token": "09876543"}, ) mock_post.reset_mock() + trigger.refresh_from_db() + self.assertFalse(trigger.is_archived) with patch("requests.delete") as mock_post: mock_post.return_value = MockResponse(200, json.dumps({"success": True})) @@ -353,6 +355,8 @@ def test_new_conversation_triggers(self): params={"access_token": "09876543"}, ) mock_post.reset_mock() + trigger.refresh_from_db() + self.assertTrue(trigger.is_archived) with patch("requests.post") as mock_post: mock_post.return_value = MockResponse(200, json.dumps({"success": True})) @@ -366,6 +370,24 @@ def test_new_conversation_triggers(self): params={"access_token": "09876543"}, ) mock_post.reset_mock() + trigger.refresh_from_db() + self.assertFalse(trigger.is_archived) + + with patch("requests.delete") as mock_post: + mock_post.side_effect = [MockResponse(400, "Error found")] + + with self.assertRaises(Exception): + trigger.archive(self.admin) + + mock_post.assert_called_once_with( + "https://graph.facebook.com/v18.0/me/messenger_profile", + json={"fields": ["get_started"]}, + 
headers={"Content-Type": "application/json"}, + params={"access_token": "09876543"}, + ) + mock_post.reset_mock() + trigger.refresh_from_db() + self.assertTrue(trigger.is_archived) def test_get_error_ref_url(self): self.assertEqual( diff --git a/temba/channels/types/facebookapp/type.py b/temba/channels/types/facebookapp/type.py index c263e739ba9..2a27aba83f0 100644 --- a/temba/channels/types/facebookapp/type.py +++ b/temba/channels/types/facebookapp/type.py @@ -38,7 +38,7 @@ def get_urls(self): return [ self.get_claim_url(), re_path( - r"^(?P[a-z0-9\-]+)/refresh_token$", RefreshToken.as_view(channel_type=self), name="refresh_token" + r"^(?P[a-z0-9\-]+)/refresh_token/$", RefreshToken.as_view(channel_type=self), name="refresh_token" ), ] @@ -76,7 +76,7 @@ def deactivate_trigger(self, trigger): url, json=body, params={"access_token": access_token}, headers={"Content-Type": "application/json"} ) - if response.status_code != 200: # pragma: no cover + if response.status_code != 200: raise Exception("Unable to update call to action: %s" % response.text) def get_redact_values(self, channel) -> tuple: # pragma: needs cover diff --git a/temba/channels/types/facebookapp/views.py b/temba/channels/types/facebookapp/views.py index 5c0097c9f2e..30628074bcb 100644 --- a/temba/channels/types/facebookapp/views.py +++ b/temba/channels/types/facebookapp/views.py @@ -6,7 +6,7 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from temba.orgs.views import ModalMixin, OrgObjPermsMixin +from temba.orgs.views.mixins import OrgObjPermsMixin from temba.utils.text import truncate from ...models import Channel @@ -136,7 +136,7 @@ def form_valid(self, form): return super().form_valid(form) -class RefreshToken(ChannelTypeMixin, ModalMixin, OrgObjPermsMixin, SmartModelActionView): +class RefreshToken(ChannelTypeMixin, OrgObjPermsMixin, SmartModelActionView, SmartFormView): class Form(forms.Form): user_access_token = forms.CharField(min_length=32, required=True, help_text=_("The User Access Token")) fb_user_id = forms.CharField( @@ -152,6 +152,9 @@ class Form(forms.Form): title = _("Reconnect Facebook Page") menu_path = "/settings/workspace" + def derive_menu_path(self): + return f"/settings/channels/{self.get_object().uuid}" + def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["refresh_url"] = reverse("channels.types.facebookapp.refresh_token", args=(self.object.uuid,)) diff --git a/temba/channels/types/instagram/type.py b/temba/channels/types/instagram/type.py index 67656ca72f3..a36a2e73110 100644 --- a/temba/channels/types/instagram/type.py +++ b/temba/channels/types/instagram/type.py @@ -35,7 +35,7 @@ def get_urls(self): return [ self.get_claim_url(), re_path( - r"^(?P[a-z0-9\-]+)/refresh_token$", + r"^(?P[a-z0-9\-]+)/refresh_token/$", RefreshToken.as_view(channel_type=self), name="refresh_token", ), diff --git a/temba/channels/types/instagram/views.py b/temba/channels/types/instagram/views.py index 35f3d31b9a2..0d943692cc2 100644 --- a/temba/channels/types/instagram/views.py +++ b/temba/channels/types/instagram/views.py @@ -8,7 +8,7 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from temba.orgs.views import ModalMixin, OrgObjPermsMixin +from temba.orgs.views.mixins import OrgObjPermsMixin from temba.utils.text import truncate from ...models import Channel @@ -167,7 +167,7 @@ def form_valid(self, form): return super().form_valid(form) -class RefreshToken(ChannelTypeMixin, ModalMixin, 
OrgObjPermsMixin, SmartModelActionView): +class RefreshToken(ChannelTypeMixin, OrgObjPermsMixin, SmartModelActionView, SmartFormView): class Form(forms.Form): user_access_token = forms.CharField(min_length=32, required=True, help_text=_("The User Access Token")) fb_user_id = forms.CharField( @@ -183,6 +183,9 @@ class Form(forms.Form): template_name = "channels/types/instagram/refresh_token.html" title = _("Reconnect Instagram Business Account") + def derive_menu_path(self): + return f"/settings/channels/{self.get_object().uuid}" + def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["refresh_url"] = reverse("channels.types.instagram.refresh_token", args=(self.object.uuid,)) diff --git a/temba/channels/types/jasmin/tests.py b/temba/channels/types/jasmin/tests.py index 99e5354e464..69e2403004c 100644 --- a/temba/channels/types/jasmin/tests.py +++ b/temba/channels/types/jasmin/tests.py @@ -28,7 +28,7 @@ def test_claim(self, mock_socket_hostname): post_data["number"] = "250788123123" post_data["username"] = "user1" post_data["password"] = "pass1" - post_data["url"] = "https://nyaruka.com/send" + post_data["url"] = "https://textit.com/send" response = self.client.post(url, post_data) @@ -58,7 +58,7 @@ def test_claim(self, mock_socket_hostname): post_data["number"] = "200" post_data["username"] = "user1" post_data["password"] = "pass1" - post_data["url"] = "https://nyaruka.com/send" + post_data["url"] = "https://textit.com/send" response = self.client.post(url, post_data) diff --git a/temba/channels/types/justcall/type.py b/temba/channels/types/justcall/type.py index 0a5dae947a8..b5a678a2a7a 100644 --- a/temba/channels/types/justcall/type.py +++ b/temba/channels/types/justcall/type.py @@ -21,6 +21,7 @@ class JustCallType(ChannelType): courier_url = r"^jcl/(?P[a-z0-9\-]+)/(?Preceive|status)$" schemes = [URN.TEL_SCHEME] + async_activation = False claim_view = ClaimView claim_blurb = _("If you have a %(link)s number, you can quickly connect it using their APIs.") % { diff --git a/temba/channels/types/kannel/tests.py b/temba/channels/types/kannel/tests.py index 58d7fd0b61e..c2b3c431067 100644 --- a/temba/channels/types/kannel/tests.py +++ b/temba/channels/types/kannel/tests.py @@ -29,7 +29,7 @@ def test_claim(self, mock_socket_hostname): post_data["number"] = "3071" post_data["country"] = "RW" - post_data["url"] = "http://nyaruka.com/cgi-bin/sendsms" + post_data["url"] = "http://textit.com/cgi-bin/sendsms" post_data["verify_ssl"] = False post_data["encoding"] = Channel.ENCODING_SMART diff --git a/temba/channels/types/mailgun/__init__.py b/temba/channels/types/mailgun/__init__.py deleted file mode 100644 index 247ca8a2082..00000000000 --- a/temba/channels/types/mailgun/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .type import MailgunType # noqa diff --git a/temba/channels/types/mailgun/tests.py b/temba/channels/types/mailgun/tests.py deleted file mode 100644 index 17eecd4cdfd..00000000000 --- a/temba/channels/types/mailgun/tests.py +++ /dev/null @@ -1,61 +0,0 @@ -from django.urls import reverse - -from temba.tests import TembaTest - -from ...models import Channel - - -class MailgunTypeTest(TembaTest): - def test_claim(self): - claim_url = reverse("channels.types.mailgun.claim") - - self.login(self.admin) - - response = self.client.get(reverse("channels.channel_claim")) - self.assertNotContains(response, claim_url) - - self.login(self.customer_support, choose_org=self.org) - - response = self.client.get(reverse("channels.channel_claim")) - 
self.assertContains(response, claim_url) - - response = self.client.get(claim_url) - self.assertEqual(200, response.status_code) - self.assertEqual({"subject": "Chat with Nyaruka"}, response.context["form"].initial) - - # try to submit with invalid email address - response = self.client.post( - claim_url, - { - "address": "!!!!!!", - "subject": "Chat with Bob", - "sending_key": "0123456789", - "signing_key": "9876543210", - }, - follow=True, - ) - self.assertFormError(response.context["form"], "address", "Enter a valid email address.") - - response = self.client.post( - claim_url, - { - "address": "bob@acme.com", - "subject": "Chat with Bob", - "sending_key": "0123456789", - "signing_key": "9876543210", - }, - follow=True, - ) - self.assertEqual(200, response.status_code) - - channel = Channel.objects.get(channel_type="MLG") - self.assertEqual("bob@acme.com", channel.name) - self.assertEqual("bob@acme.com", channel.address) - self.assertEqual( - { - "auth_token": "0123456789", - "default_subject": "Chat with Bob", - "signing_key": "9876543210", - }, - channel.config, - ) diff --git a/temba/channels/types/mailgun/type.py b/temba/channels/types/mailgun/type.py deleted file mode 100644 index 12af39bff18..00000000000 --- a/temba/channels/types/mailgun/type.py +++ /dev/null @@ -1,43 +0,0 @@ -from django.utils.translation import gettext_lazy as _ - -from temba.contacts.models import URN - -from ...models import ChannelType, ConfigUI -from .views import ClaimView - - -class MailgunType(ChannelType): - """ - A Mailgun email channel. - """ - - code = "MLG" - name = "Mailgun" - category = ChannelType.Category.API - - courier_url = r"^mlg/(?P[a-z0-9\-]+)/receive$" - schemes = [URN.EMAIL_SCHEME] - - claim_blurb = _("Add a %(link)s channel to send and receive messages as emails.") % { - "link": 'Mailgun' - } - claim_view = ClaimView - - config_ui = ConfigUI( - blurb=_( - "To finish configuring this channel, you'll need to add a route for received messages that forwards them." 
- ), - endpoints=[ - ConfigUI.Endpoint( - courier="receive", - label=_("Receive URL"), - help=_("The URL to forward new emails to."), - ), - ], - ) - - CONFIG_DEFAULT_SUBJECT = "default_subject" - CONFIG_SIGNING_KEY = "signing_key" - - def is_available_to(self, org, user): - return user.is_staff, user.is_staff diff --git a/temba/channels/types/mailgun/views.py b/temba/channels/types/mailgun/views.py deleted file mode 100644 index a8f9a7a4a8e..00000000000 --- a/temba/channels/types/mailgun/views.py +++ /dev/null @@ -1,49 +0,0 @@ -from smartmin.views import SmartFormView - -from django import forms -from django.utils.translation import gettext_lazy as _ - -from ...models import Channel -from ...views import ClaimViewMixin - - -class ClaimView(ClaimViewMixin, SmartFormView): - class Form(ClaimViewMixin.Form): - address = forms.EmailField(label=_("Email Address"), help_text=_("The email address.")) - subject = forms.CharField(label=_("Subject"), help_text=_("The default subject for new emails.")) - sending_key = forms.CharField( - label=_("Sending API key"), - help_text=_("A sending API key you have configured for this domain."), - max_length=50, - ) - signing_key = forms.CharField( - label=_("Webhook Signing key"), - help_text=_("The signing key used for webhook calls."), - max_length=50, - ) - - form_class = Form - - def derive_initial(self): - return {"subject": f"Chat with {self.request.org.name}"} - - def form_valid(self, form): - from .type import MailgunType - - address = form.cleaned_data["address"] - - self.object = Channel.create( - self.request.org, - self.request.user, - None, - self.channel_type, - name=address, - address=address, - config={ - Channel.CONFIG_AUTH_TOKEN: form.cleaned_data["sending_key"], - MailgunType.CONFIG_DEFAULT_SUBJECT: form.cleaned_data["subject"], - MailgunType.CONFIG_SIGNING_KEY: form.cleaned_data["signing_key"], - }, - ) - - return super().form_valid(form) diff --git a/temba/channels/types/plivo/type.py b/temba/channels/types/plivo/type.py index 12d7521b352..e83df251568 100644 --- a/temba/channels/types/plivo/type.py +++ b/temba/channels/types/plivo/type.py @@ -45,6 +45,6 @@ def deactivate(self, channel): def get_urls(self): return [ self.get_claim_url(), - re_path(r"^search$", SearchView.as_view(channel_type=self), name="search"), - re_path(r"^connect$", Connect.as_view(channel_type=self), name="connect"), + re_path(r"^search/$", SearchView.as_view(channel_type=self), name="search"), + re_path(r"^connect/$", Connect.as_view(channel_type=self), name="connect"), ] diff --git a/temba/channels/types/plivo/views.py b/temba/channels/types/plivo/views.py index 84a1b51a06a..4c8cc70334e 100644 --- a/temba/channels/types/plivo/views.py +++ b/temba/channels/types/plivo/views.py @@ -13,7 +13,7 @@ from temba.channels.models import Channel from temba.channels.views import BaseClaimNumberMixin, ChannelTypeMixin, ClaimViewMixin -from temba.orgs.views import OrgPermsMixin +from temba.orgs.views.mixins import OrgPermsMixin from temba.utils import countries from temba.utils.fields import SelectWidget from temba.utils.http import http_headers diff --git a/temba/channels/types/telegram/type.py b/temba/channels/types/telegram/type.py index c18df5f0069..1a94758f88d 100644 --- a/temba/channels/types/telegram/type.py +++ b/temba/channels/types/telegram/type.py @@ -20,6 +20,8 @@ class TelegramType(ChannelType): courier_url = r"^tg/(?P[a-z0-9\-]+)/receive$" schemes = [URN.TELEGRAM_SCHEME] + async_activation = False + redact_response_keys = {"first_name", "last_name", "username"} 
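+    # values under these keys are masked in logged Telegram API responses:
+    # ChannelLog passes them as redact_keys to redact.http_trace when
+    # anonymizing a log for display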
claim_blurb = _( diff --git a/temba/channels/types/test/__init__.py b/temba/channels/types/test/__init__.py new file mode 100644 index 00000000000..e86525ced1c --- /dev/null +++ b/temba/channels/types/test/__init__.py @@ -0,0 +1 @@ +from .type import TestType # noqa diff --git a/temba/channels/types/test/tests.py b/temba/channels/types/test/tests.py new file mode 100644 index 00000000000..ed6d24c2918 --- /dev/null +++ b/temba/channels/types/test/tests.py @@ -0,0 +1,22 @@ +from django.urls import reverse + +from temba.tests import CRUDLTestMixin, TembaTest + +from ...models import Channel + + +class TestTypeTest(TembaTest, CRUDLTestMixin): + def test_claim(self): + claim_url = reverse("channels.types.test.claim") + + self.assertStaffOnly(claim_url) + + response = self.requestView(claim_url, self.customer_support, post_data={"tps": 50}, choose_org=self.org) + + self.assertEqual(302, response.status_code) + + channel = Channel.objects.filter(channel_type="TST").first() + + self.assertIsNotNone(channel) + self.assertEqual(50, channel.tps) + self.assertEqual(["ext"], channel.schemes) diff --git a/temba/channels/types/test/type.py b/temba/channels/types/test/type.py new file mode 100644 index 00000000000..2fba8f6d21e --- /dev/null +++ b/temba/channels/types/test/type.py @@ -0,0 +1,23 @@ +from django.utils.translation import gettext_lazy as _ + +from temba.contacts.models import URN + +from ...models import ChannelType +from .views import ClaimView + + +class TestType(ChannelType): + """ + A dummy channel type for load testing purposes + """ + + code = "TST" + name = "Test" + category = ChannelType.Category.API + schemes = [URN.EXTERNAL_SCHEME] + + claim_blurb = _("Only staff users can see this option. Used for load testing. Uses ext URNs.") + claim_view = ClaimView + + def is_available_to(self, org, user): + return user.is_staff, user.is_staff diff --git a/temba/channels/types/test/views.py b/temba/channels/types/test/views.py new file mode 100644 index 00000000000..3fcbba4bca3 --- /dev/null +++ b/temba/channels/types/test/views.py @@ -0,0 +1,32 @@ +from smartmin.views import SmartFormView + +from django import forms +from django.utils.translation import gettext_lazy as _ + +from temba.utils.views.mixins import StaffOnlyMixin + +from ...models import Channel +from ...views import ClaimViewMixin + + +class ClaimView(StaffOnlyMixin, ClaimViewMixin, SmartFormView): + class Form(ClaimViewMixin.Form): + tps = forms.IntegerField(help_text=_("TPS."), min_value=1, max_value=1000) + + form_class = Form + readonly_servicing = False + + def form_valid(self, form): + from .type import TestType + + self.object = Channel.create( + self.request.org, + self.request.user, + None, + TestType.code, + "Load Tester", + config={"send_delay_ms": 10, "error_percent": 5}, + tps=form.cleaned_data["tps"], + ) + + return super().form_valid(form) diff --git a/temba/channels/types/twilio/type.py b/temba/channels/types/twilio/type.py index e318af8fd86..02d04c5fb73 100644 --- a/temba/channels/types/twilio/type.py +++ b/temba/channels/types/twilio/type.py @@ -81,8 +81,8 @@ def deactivate(self, channel): def get_urls(self): return [ self.get_claim_url(), - re_path(r"^search$", SearchView.as_view(channel_type=self), name="search"), - re_path(r"^connect$", Connect.as_view(channel_type=self), name="connect"), + re_path(r"^search/$", SearchView.as_view(channel_type=self), name="search"), + re_path(r"^connect/$", Connect.as_view(channel_type=self), name="connect"), ] def get_error_ref_url(self, channel, code: str) -> str: diff --git 
a/temba/channels/types/twilio/views.py b/temba/channels/types/twilio/views.py index 0fe1493be18..1916ea3b2d9 100644 --- a/temba/channels/types/twilio/views.py +++ b/temba/channels/types/twilio/views.py @@ -13,7 +13,7 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from temba.orgs.views import OrgPermsMixin +from temba.orgs.views.mixins import OrgPermsMixin from temba.utils import countries from temba.utils.fields import InputWidget, SelectWidget from temba.utils.timezones import timezone_to_country_code diff --git a/temba/channels/types/twitter/__init__.py b/temba/channels/types/twitter/__init__.py deleted file mode 100644 index f93318b2e8e..00000000000 --- a/temba/channels/types/twitter/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .type import TwitterType # noqa diff --git a/temba/channels/types/twitter/client.py b/temba/channels/types/twitter/client.py deleted file mode 100644 index bb39ac72522..00000000000 --- a/temba/channels/types/twitter/client.py +++ /dev/null @@ -1,40 +0,0 @@ -from urllib.parse import quote_plus - -from twython import Twython - - -class TwitterClient(Twython): - def get_webhooks(self, env_name): - """ - Returns the webhooks currently active for this app. (Twitter claims there can only be one) - Docs: https://developer.twitter.com/en/docs/accounts-and-users/subscribe-account-activity/api-reference/aaa-standard-all - """ - return self.get("https://api.twitter.com/1.1/account_activity/all/%s/webhooks.json" % env_name) - - def delete_webhook(self, env_name, webhook_id): - """ - Deletes the webhook for the current app / user and passed in environment name. - Docs: https://developer.twitter.com/en/docs/accounts-and-users/subscribe-account-activity/api-reference/aaa-standard-all - """ - self.request( - "https://api.twitter.com/1.1/account_activity/all/%s/webhooks/%s.json" % (env_name, webhook_id), - method="DELETE", - ) - - def register_webhook(self, env_name, url): - """ - Registers a new webhook URL for the given application context. 
- Docs: https://developer.twitter.com/en/docs/accounts-and-users/subscribe-account-activity/api-reference/aaa-standard-all - """ - set_webhook_url = "https://api.twitter.com/1.1/account_activity/all/%s/webhooks.json?url=%s" % ( - env_name, - quote_plus(url), - ) - return self.post(set_webhook_url) - - def subscribe_to_webhook(self, env_name): - """ - Subscribes all user's events for this apps webhook - Docs: https://developer.twitter.com/en/docs/accounts-and-users/subscribe-account-activity/api-reference/aaa-standard-all - """ - return self.post("https://api.twitter.com/1.1/account_activity/all/%s/subscriptions.json" % env_name) diff --git a/temba/channels/types/twitter/tests.py b/temba/channels/types/twitter/tests.py deleted file mode 100644 index 114da89346b..00000000000 --- a/temba/channels/types/twitter/tests.py +++ /dev/null @@ -1,187 +0,0 @@ -from unittest.mock import patch - -from twython import TwythonError - -from django.contrib.auth.models import Group -from django.urls import reverse - -from temba.tests import TembaTest - -from ...models import Channel -from .client import TwitterClient - - -class TwitterTypeTest(TembaTest): - def setUp(self): - super().setUp() - - self.channel = self.create_channel( - "TWT", - "Twitter Beta", - "beta_bob", - config={ - "api_key": "ak1", - "api_secret": "as1", - "access_token": "at1", - "access_token_secret": "ats1", - "handle_id": "h123456", - "webhook_id": "1234567", - "env_name": "beta", - }, - ) - - @patch("temba.channels.types.twitter.client.TwitterClient.get_webhooks") - @patch("temba.channels.types.twitter.client.TwitterClient.delete_webhook") - @patch("temba.channels.types.twitter.client.TwitterClient.subscribe_to_webhook") - @patch("temba.channels.types.twitter.client.TwitterClient.register_webhook") - @patch("twython.Twython.verify_credentials") - def test_claim( - self, - mock_verify_credentials, - mock_register_webhook, - mock_subscribe_to_webhook, - mock_delete_webhook, - mock_get_webhooks, - ): - mock_get_webhooks.return_value = [{"id": "webhook_id"}] - mock_delete_webhook.return_value = {"ok", True} - - Group.objects.get(name="Beta").user_set.add(self.admin) - - url = reverse("channels.types.twitter.claim") - self.login(self.admin) - - response = self.client.get(reverse("channels.channel_claim")) - self.assertContains(response, "/channels/types/twitter/claim") - - response = self.client.get(url) - self.assertEqual(response.status_code, 200) - self.assertContains(response, "Connect Twitter") - - self.assertEqual( - list(response.context["form"].fields.keys()), - ["api_key", "api_secret", "access_token", "access_token_secret", "env_name", "loc"], - ) - - # try submitting empty form - response = self.client.post(url, {}) - self.assertEqual(response.status_code, 200) - self.assertFormError(response.context["form"], "api_key", "This field is required.") - self.assertFormError(response.context["form"], "api_secret", "This field is required.") - self.assertFormError(response.context["form"], "access_token", "This field is required.") - self.assertFormError(response.context["form"], "access_token_secret", "This field is required.") - - # try submitting with invalid credentials - mock_verify_credentials.side_effect = TwythonError("Invalid credentials") - - response = self.client.post( - url, {"api_key": "ak", "api_secret": "as", "access_token": "at", "access_token_secret": "ats"} - ) - self.assertEqual(response.status_code, 200) - self.assertFormError( - response.context["form"], None, "The provided Twitter credentials do not appear to be 
valid." - ) - - # error registering webhook - mock_verify_credentials.return_value = {"id": "87654", "screen_name": "jimmy"} - mock_verify_credentials.side_effect = None - mock_register_webhook.side_effect = TwythonError("Exceeded number of webhooks") - - response = self.client.post( - url, - { - "api_key": "ak", - "api_secret": "as", - "access_token": "at", - "access_token_secret": "ats", - "env_name": "production", - }, - ) - self.assertEqual(response.status_code, 200) - self.assertFormError(response.context["form"], None, "Exceeded number of webhooks") - - # try a valid submission - mock_register_webhook.side_effect = None - mock_register_webhook.return_value = {"id": "1234567"} - - response = self.client.post( - url, - { - "api_key": "ak", - "api_secret": "as", - "access_token": "at", - "access_token_secret": "ats", - "env_name": "beta", - }, - ) - self.assertEqual(response.status_code, 302) - - channel = Channel.objects.get(address="jimmy", is_active=True) - self.assertEqual( - channel.config, - { - "handle_id": "87654", - "api_key": "ak", - "api_secret": "as", - "access_token": "at", - "env_name": "beta", - "access_token_secret": "ats", - "webhook_id": "1234567", - "callback_domain": channel.callback_domain, - }, - ) - - mock_register_webhook.assert_called_with( - "beta", "https://%s/c/twt/%s/receive" % (channel.callback_domain, channel.uuid) - ) - mock_subscribe_to_webhook.assert_called_with("beta") - - @patch("temba.channels.types.twitter.client.TwitterClient.delete_webhook") - def test_release(self, mock_delete_webhook): - self.channel.release(self.admin) - mock_delete_webhook.assert_called_once_with("beta", "1234567") - - -class TwitterClientTest(TembaTest): - def setUp(self): - super().setUp() - - self.client = TwitterClient("APIKEY", "APISECRET", "ACCESSTOKEN", "ACCESSTOKENSECRET") - - @patch("twython.Twython.request") - def test_get_webhooks(self, mock_request): - self.client.get_webhooks("temba") - - mock_request.assert_called_once_with( - "https://api.twitter.com/1.1/account_activity/all/temba/webhooks.json", params=None, version="1.1" - ) - - @patch("twython.Twython.request") - def test_delete_webhook(self, mock_request): - self.client.delete_webhook("temba", "1234") - - mock_request.assert_called_once_with( - "https://api.twitter.com/1.1/account_activity/all/temba/webhooks/1234.json", method="DELETE" - ) - - @patch("twython.Twython.request") - def test_register_webhook(self, mock_request): - self.client.register_webhook("temba", "http://temba.com/mycallback.asp") - - mock_request.assert_called_once_with( - "https://api.twitter.com/1.1/account_activity/all/temba/webhooks.json?url=http%3A%2F%2Ftemba.com%2Fmycallback.asp", - "POST", - params=None, - version="1.1", - ) - - @patch("twython.Twython.request") - def test_subscribe_to_webhook(self, mock_request): - self.client.subscribe_to_webhook("temba") - - mock_request.assert_called_once_with( - "https://api.twitter.com/1.1/account_activity/all/temba/subscriptions.json", - "POST", - params=None, - version="1.1", - ) diff --git a/temba/channels/types/twitter/type.py b/temba/channels/types/twitter/type.py deleted file mode 100644 index aacaa6ff954..00000000000 --- a/temba/channels/types/twitter/type.py +++ /dev/null @@ -1,68 +0,0 @@ -import logging - -from django.forms import ValidationError -from django.urls import reverse -from django.utils.translation import gettext_lazy as _ - -from temba.contacts.models import URN - -from ...models import ChannelType -from .client import TwitterClient -from .views import ClaimView, 
UpdateForm
-
-logger = logging.getLogger(__name__)
-
-
-class TwitterType(ChannelType):
-    """
-    A Twitter channel which uses Twitter's Account Activity API to send and receive direct messages.
-    """
-
-    code = "TWT"
-    name = "Twitter"
-    category = ChannelType.Category.SOCIAL_MEDIA
-    beta_only = True
-
-    courier_url = r"^twt/(?P<uuid>[a-z0-9\-]+)/receive$"
-    schemes = [URN.TWITTER_SCHEME, URN.TWITTERID_SCHEME]
-    async_activation = False
-    redact_response_keys = ("urn",)
-    redact_request_keys = ("sender_id", "name", "screen_name", "profile_image_url", "profile_image_url_https")
-
-    claim_blurb = _(
-        "Send and receive messages on Twitter using their %(link)s API. You will have to apply for Twitter API access "
-        "and create a Twitter application."
-    ) % {
-        "link": '<a target="_blank" href="https://developer.twitter.com">Twitter Activity</a>'
-    }
-    claim_view = ClaimView
-    update_form = UpdateForm
-
-    def activate(self, channel):
-        config = channel.config
-        client = TwitterClient(
-            config["api_key"], config["api_secret"], config["access_token"], config["access_token_secret"]
-        )
-
-        callback_url = "https://%s%s" % (channel.callback_domain, reverse("courier.twt", args=[channel.uuid]))
-        try:
-            # check for existing hooks, if there is just one, remove it
-            hooks = client.get_webhooks(config["env_name"])
-            if len(hooks) == 1:
-                client.delete_webhook(config["env_name"], hooks[0]["id"])
-
-            resp = client.register_webhook(config["env_name"], callback_url)
-            channel.config["webhook_id"] = resp["id"]
-            channel.save(update_fields=["config"])
-            client.subscribe_to_webhook(config["env_name"])
-        except Exception as e:  # pragma: no cover
-            logger.error(f"Unable to activate TwitterActivity: {str(e)}", exc_info=True)
-            raise ValidationError(e)
-
-    def deactivate(self, channel):
-        config = channel.config
-        if "webhook_id" in config:
-            client = TwitterClient(
-                config["api_key"], config["api_secret"], config["access_token"], config["access_token_secret"]
-            )
-            client.delete_webhook(config["env_name"], config["webhook_id"])
diff --git a/temba/channels/types/twitter/views.py b/temba/channels/types/twitter/views.py
deleted file mode 100644
index 2c09fdfb835..00000000000
--- a/temba/channels/types/twitter/views.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from smartmin.views import SmartFormView
-from twython import TwythonError
-
-from django import forms
-from django.conf import settings
-from django.core.exceptions import ValidationError
-from django.utils.translation import gettext_lazy as _
-
-from temba.utils.views import NonAtomicMixin
-
-from ...models import Channel
-from ...views import ClaimViewMixin, UpdateChannelForm
-from .client import TwitterClient
-
-
-class ClaimView(NonAtomicMixin, ClaimViewMixin, SmartFormView):
-    class Form(ClaimViewMixin.Form):
-        api_key = forms.CharField(label=_("Consumer API Key"))
-        api_secret = forms.CharField(label=_("Consumer API Secret Key"))
-        access_token = forms.CharField(label=_("Access Token"))
-        access_token_secret = forms.CharField(label=_("Access Token Secret"))
-        env_name = forms.CharField(label=_("Environment Name"))
-
-        def clean(self):
-            cleaned_data = super().clean()
-            api_key = cleaned_data.get("api_key")
-            api_secret = cleaned_data.get("api_secret")
-            access_token = cleaned_data.get("access_token")
-            access_token_secret = cleaned_data.get("access_token_secret")
-
-            if api_key and api_secret and access_token and access_token_secret:
-                client = TwitterClient(api_key, api_secret, access_token, access_token_secret)
-                try:
-                    client.verify_credentials()
-                except TwythonError:
-                    raise ValidationError(_("The provided Twitter
credentials do not appear to be valid.")) - - return cleaned_data - - form_class = Form - - def form_valid(self, form): - cleaned_data = form.cleaned_data - api_key = cleaned_data["api_key"] - api_secret = cleaned_data["api_secret"] - access_token = cleaned_data["access_token"] - access_token_secret = cleaned_data["access_token_secret"] - env_name = cleaned_data["env_name"] - - client = TwitterClient(api_key, api_secret, access_token, access_token_secret) - account_info = client.verify_credentials() - handle_id = str(account_info["id"]) - screen_name = account_info["screen_name"] - - config = { - "handle_id": handle_id, - "api_key": api_key, - "api_secret": api_secret, - "access_token": access_token, - "access_token_secret": access_token_secret, - "env_name": env_name, - Channel.CONFIG_CALLBACK_DOMAIN: settings.HOSTNAME, - } - - try: - self.object = Channel.create( - self.request.org, - self.request.user, - None, - self.channel_type, - name="@%s" % screen_name, - address=screen_name, - config=config, - ) - except ValidationError as e: - self.form.add_error(None, e) - return self.form_invalid(form) - - return super().form_valid(form) - - -class UpdateForm(UpdateChannelForm): - class Meta(UpdateChannelForm.Meta): - fields = "name", "address", "log_policy" - readonly = ("address",) - labels = {"address": _("Handle")} - helps = {"address": _("Twitter handle of this channel")} diff --git a/temba/channels/types/vonage/client.py b/temba/channels/types/vonage/client.py index 258f7683315..2b94f61c357 100644 --- a/temba/channels/types/vonage/client.py +++ b/temba/channels/types/vonage/client.py @@ -17,7 +17,7 @@ def __init__(self, api_key: str, api_secret: str): def check_credentials(self) -> bool: try: - self.base.get_balance() + self.base.account.get_balance() return True except vonage.AuthenticationError: return False @@ -27,13 +27,13 @@ def get_numbers(self, pattern: str = None, size: int = 10) -> list: if pattern: params["pattern"] = str(pattern).strip("+") - response = self._with_retry(self.base.get_account_numbers, params=params) + response = self._with_retry(self.base.numbers.get_account_numbers, params=params) return response["numbers"] if int(response.get("count", 0)) else [] def search_numbers(self, country, pattern): response = self._with_retry( - self.base.get_available_numbers, + self.base.numbers.get_available_numbers, country_code=country, pattern=pattern, search_pattern=1, @@ -46,7 +46,7 @@ def search_numbers(self, country, pattern): numbers += response["numbers"] response = self._with_retry( - self.base.get_available_numbers, + self.base.numbers.get_available_numbers, country_code=country, pattern=pattern, search_pattern=1, @@ -62,7 +62,7 @@ def search_numbers(self, country, pattern): def buy_number(self, country, number): params = dict(msisdn=number.lstrip("+"), country=country) - self._with_retry(self.base.buy_number, params=params) + self._with_retry(self.base.numbers.buy_number, params=params) def update_number(self, country, number, mo_url, app_id): number = number.lstrip("+") @@ -71,7 +71,7 @@ def update_number(self, country, number, mo_url, app_id): if app_id: params["app_id"] = app_id - self._with_retry(self.base.update_number, params=params) + self._with_retry(self.base.numbers.update_number, params=params) def create_application(self, domain, channel_uuid): name = "%s/%s" % (domain, channel_uuid) @@ -90,7 +90,7 @@ def create_application(self, domain, channel_uuid): }, } - response = self._with_retry(self.base.application_v2.create_application, application_data=app_data) + 
response = self._with_retry(self.base.application.create_application, application_data=app_data) app_id = response.get("id") app_private_key = response.get("keys", {}).get("private_key") @@ -98,7 +98,7 @@ def create_application(self, domain, channel_uuid): def delete_application(self, app_id): try: - self._with_retry(self.base.application_v2.delete_application, application_id=app_id) + self._with_retry(self.base.application.delete_application, application_id=app_id) except vonage.ClientError: # possible application no longer exists pass diff --git a/temba/channels/types/vonage/tests.py b/temba/channels/types/vonage/tests.py index 0282b290b30..b259ff3ffad 100644 --- a/temba/channels/types/vonage/tests.py +++ b/temba/channels/types/vonage/tests.py @@ -261,7 +261,7 @@ def test_deactivate(self): channel.save(update_fields=("channel_type", "config")) # mock a 404 response from Vonage during deactivation - with patch("vonage.ApplicationV2.delete_application") as mock_delete_application: + with patch("vonage.application.Application.delete_application") as mock_delete_application: mock_delete_application.side_effect = vonage.ClientError("404 response") # releasing shouldn't blow up on auth failures @@ -297,7 +297,7 @@ def setUp(self): self.client = VonageClient("abc123", "asecret") - @patch("vonage.Client.get_balance") + @patch("vonage.account.Account.get_balance") def test_check_credentials(self, mock_get_balance): mock_get_balance.side_effect = vonage.AuthenticationError("401 not allowed") @@ -308,7 +308,7 @@ def test_check_credentials(self, mock_get_balance): self.assertTrue(self.client.check_credentials()) - @patch("vonage.Client.get_account_numbers") + @patch("vonage.number_management.Numbers.get_account_numbers") def test_get_numbers(self, mock_get_account_numbers): mock_get_account_numbers.return_value = {"count": 2, "numbers": ["23463", "568658"]} @@ -316,7 +316,7 @@ def test_get_numbers(self, mock_get_account_numbers): mock_get_account_numbers.assert_called_once_with(params={"size": 10, "pattern": "593"}) - @patch("vonage.Client.get_available_numbers") + @patch("vonage.number_management.Numbers.get_available_numbers") def test_search_numbers(self, mock_get_available_numbers): mock_get_available_numbers.side_effect = [ {"count": 2, "numbers": ["23463", "568658"]}, @@ -332,13 +332,13 @@ def test_search_numbers(self, mock_get_available_numbers): ] ) - @patch("vonage.Client.buy_number") + @patch("vonage.number_management.Numbers.buy_number") def test_buy_number(self, mock_buy_number): self.client.buy_number(country="US", number="+12345") mock_buy_number.assert_called_once_with(params={"msisdn": "12345", "country": "US"}) - @patch("vonage.Client.update_number") + @patch("vonage.number_management.Numbers.update_number") def test_update_number(self, mock_update_number): self.client.update_number(country="US", number="+12345", mo_url="http://test", app_id="ID123") @@ -346,7 +346,7 @@ def test_update_number(self, mock_update_number): params={"moHttpUrl": "http://test", "msisdn": "12345", "country": "US", "app_id": "ID123"} ) - @patch("vonage.ApplicationV2.create_application") + @patch("vonage.application.Application.create_application") def test_create_application(self, mock_create_application): mock_create_application.return_value = {"id": "myappid", "keys": {"private_key": "tejh42gf3"}} @@ -374,7 +374,7 @@ def test_create_application(self, mock_create_application): mock_create_application.assert_called_once_with(application_data=app_data) - @patch("vonage.ApplicationV2.delete_application") + 
@patch("vonage.application.Application.delete_application") def test_delete_application(self, mock_delete_application): self.client.delete_application("myappid") diff --git a/temba/channels/types/vonage/type.py b/temba/channels/types/vonage/type.py index 668029e34e8..30b75f4ddbb 100644 --- a/temba/channels/types/vonage/type.py +++ b/temba/channels/types/vonage/type.py @@ -34,7 +34,7 @@ class VonageType(ChannelType): """ A Vonage (formerly Nexmo) channel - Callback status information (https://developer.nexmo.com/api/voice#status-values): + Callback status information (https://developer.vonage.com/en/api/voice#status-values): started: Platform has started the call. ringing: The user's handset is ringing. diff --git a/temba/channels/types/vonage/views.py b/temba/channels/types/vonage/views.py index 1b6bbe568fd..8ae2e5504a1 100644 --- a/temba/channels/types/vonage/views.py +++ b/temba/channels/types/vonage/views.py @@ -7,7 +7,7 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from temba.orgs.views import OrgPermsMixin +from temba.orgs.views.mixins import OrgPermsMixin from temba.utils import countries from temba.utils.fields import InputWidget, SelectWidget from temba.utils.models import generate_uuid diff --git a/temba/channels/types/whatsapp/tests.py b/temba/channels/types/whatsapp/tests.py index d15b4bd9034..b13dc575e91 100644 --- a/temba/channels/types/whatsapp/tests.py +++ b/temba/channels/types/whatsapp/tests.py @@ -8,7 +8,7 @@ from temba.request_logs.models import HTTPLog from temba.tests import MockJsonResponse, MockResponse, TembaTest -from temba.utils.views import TEMBA_MENU_SELECTION +from temba.utils.views.mixins import TEMBA_MENU_SELECTION from ...models import Channel from .type import WhatsAppType @@ -20,6 +20,7 @@ class WhatsAppTypeTest(TembaTest): FACEBOOK_APPLICATION_SECRET="FB_APP_SECRET", WHATSAPP_FACEBOOK_BUSINESS_ID="FB_BUSINESS_ID", WHATSAPP_ADMIN_SYSTEM_USER_TOKEN="WA_ADMIN_TOKEN", + FACEBOOK_LOGIN_WHATSAPP_CONFIG_ID="100", ) @patch("temba.channels.types.whatsapp.views.randint") def test_claim(self, mock_randint): @@ -34,39 +35,32 @@ def test_claim(self, mock_randint): connect_whatsapp_cloud_url = reverse("channels.types.whatsapp.connect") claim_whatsapp_cloud_url = reverse("channels.types.whatsapp.claim") - # make sure plivo is on the claim page + # make sure type is not listed the claim page response = self.client.get(reverse("channels.channel_claim")) - self.assertEqual(200, response.status_code) self.assertNotContains(response, claim_whatsapp_cloud_url) - with patch("requests.get") as wa_cloud_get: - wa_cloud_get.return_value = MockJsonResponse(400, {}) - response = self.client.get(claim_whatsapp_cloud_url) - - self.assertEqual(response.status_code, 302) - - response = self.client.get(claim_whatsapp_cloud_url, follow=True) - - self.assertEqual(response.request["PATH_INFO"], "/users/login/") + # or directly accessible + self.assertRedirect(self.client.get(claim_whatsapp_cloud_url), connect_whatsapp_cloud_url) + self.assertLoginRedirect(self.client.get(connect_whatsapp_cloud_url)) self.make_beta(self.admin) - with patch("requests.get") as wa_cloud_get: - wa_cloud_get.return_value = MockJsonResponse(400, {}) - response = self.client.get(claim_whatsapp_cloud_url) - self.assertEqual(response.status_code, 302) - - response = self.client.get(claim_whatsapp_cloud_url, follow=True) - - self.assertEqual(response.request["PATH_INFO"], connect_whatsapp_cloud_url) + response = self.client.get(reverse("channels.channel_claim")) + 
self.assertContains(response, claim_whatsapp_cloud_url) + self.assertRedirect(self.client.get(claim_whatsapp_cloud_url), connect_whatsapp_cloud_url) - with patch("requests.get") as wa_cloud_get: + with patch("requests.get") as wa_cloud_get, patch("requests.post") as wa_cloud_post: wa_cloud_get.side_effect = [ MockJsonResponse(400, {}), + # debug not valid + MockJsonResponse( + 200, + {"data": {"scopes": [], "is_valid": False}}, + ), # missing permissions MockJsonResponse( 200, - {"data": {"scopes": []}}, + {"data": {"scopes": [], "is_valid": True}}, ), # success MockJsonResponse( @@ -77,7 +71,8 @@ def test_claim(self, mock_randint): "business_management", "whatsapp_business_management", "whatsapp_business_messaging", - ] + ], + "is_valid": True, } }, ), @@ -89,7 +84,8 @@ def test_claim(self, mock_randint): "business_management", "whatsapp_business_management", "whatsapp_business_messaging", - ] + ], + "is_valid": True, } }, ), @@ -101,24 +97,37 @@ def test_claim(self, mock_randint): "business_management", "whatsapp_business_management", "whatsapp_business_messaging", - ] + ], + "is_valid": True, } }, ), ] + + wa_cloud_post.return_value = MockResponse(200, json.dumps({"access_token": "Z" * 48})) + response = self.client.get(connect_whatsapp_cloud_url) self.assertEqual(response.status_code, 200) # 400 status response = self.client.post(connect_whatsapp_cloud_url, dict(user_access_token="X" * 36), follow=True) self.assertEqual( - response.context["form"].errors["__all__"][0], "Sorry account could not be connected. Please try again" + response.context["form"].errors["__all__"][0], + "Sorry account could not be connected. Please try again", + ) + + # 200 but has invalid key + response = self.client.post(connect_whatsapp_cloud_url, dict(user_access_token="X" * 36), follow=True) + self.assertEqual( + response.context["form"].errors["__all__"][0], + "Sorry account could not be connected. Please try again", ) # missing permissions response = self.client.post(connect_whatsapp_cloud_url, dict(user_access_token="X" * 36), follow=True) self.assertEqual( - response.context["form"].errors["__all__"][0], "Sorry account could not be connected. Please try again" + response.context["form"].errors["__all__"][0], + "Sorry account could not be connected. 
Please try again", ) response = self.client.post(connect_whatsapp_cloud_url, dict(user_access_token="X" * 36)) @@ -131,7 +140,7 @@ def test_claim(self, mock_randint): self.assertEqual(wa_cloud_get.call_args_list[0][0][0], "https://graph.facebook.com/v18.0/debug_token") self.assertEqual( wa_cloud_get.call_args_list[0][1], - {"params": {"access_token": "FB_APP_ID|FB_APP_SECRET", "input_token": "X" * 36}}, + {"params": {"access_token": "FB_APP_ID|FB_APP_SECRET", "input_token": "Z" * 48}}, ) # make sure the token is set on the session @@ -141,28 +150,28 @@ def test_claim(self, mock_randint): self.assertIn(WhatsAppType.SESSION_USER_TOKEN, self.client.session) - with patch("requests.get") as wa_cloud_get: - with patch("requests.post") as wa_cloud_post: - wa_cloud_get.side_effect = [ - # pre-process missing permissions - MockResponse( - 200, - json.dumps( - { - "data": { - "scopes": [ - "business_management", - "whatsapp_business_messaging", - ] - } + with patch("requests.get") as wa_cloud_get, patch("requests.post") as wa_cloud_post: + wa_cloud_get.side_effect = [ + # pre-process missing permissions + MockResponse( + 200, + json.dumps( + { + "data": { + "scopes": [ + "business_management", + "whatsapp_business_messaging", + ], + "is_valid": True, } - ), + } ), - ] + ), + ] - response = self.client.get(claim_whatsapp_cloud_url, follow=True) + response = self.client.get(claim_whatsapp_cloud_url, follow=True) - self.assertFalse(WhatsAppType.SESSION_USER_TOKEN in self.client.session) + self.assertFalse(WhatsAppType.SESSION_USER_TOKEN in self.client.session) # make sure the token is set on the session session = self.client.session @@ -171,219 +180,216 @@ def test_claim(self, mock_randint): self.assertIn(WhatsAppType.SESSION_USER_TOKEN, self.client.session) - with patch("requests.get") as wa_cloud_get: - with patch("requests.post") as wa_cloud_post: - wa_cloud_get.side_effect = [ - # pre-process for get - MockResponse( - 200, - json.dumps( - { - "data": { - "scopes": [ - "business_management", - "whatsapp_business_management", - "whatsapp_business_messaging", - ] - } - } - ), - ), - # getting target waba - MockResponse( - 200, - json.dumps( - { - "data": { - "granular_scopes": [ - { - "scope": "business_management", - "target_ids": [ - "2222222222222", - ], - }, - { - "scope": "whatsapp_business_management", - "target_ids": [ - "111111111111111", - ], - }, - { - "scope": "whatsapp_business_messaging", - "target_ids": [ - "111111111111111", - ], - }, - ] - } - } - ), - ), - # getting waba details - MockResponse( - 200, - json.dumps( - { - "id": "111111111111111", - "currency": "USD", - "message_template_namespace": "namespace-uuid", - "on_behalf_of_business_info": {"id": "2222222222222"}, + with patch("requests.get") as wa_cloud_get, patch("requests.post") as wa_cloud_post: + wa_cloud_get.side_effect = [ + # pre-process for get + MockResponse( + 200, + json.dumps( + { + "data": { + "scopes": [ + "business_management", + "whatsapp_business_management", + "whatsapp_business_messaging", + ], + "is_valid": True, } - ), + } ), - # getting waba phone numbers - MockResponse( - 200, - json.dumps( - { - "data": [ + ), + # getting target waba + MockResponse( + 200, + json.dumps( + { + "data": { + "granular_scopes": [ + { + "scope": "business_management", + "target_ids": [ + "2222222222222", + ], + }, + { + "scope": "whatsapp_business_management", + "target_ids": [ + "111111111111111", + ], + }, { - "id": "123123123", - "display_phone_number": "1234", - "verified_name": "Long WABA name" + " foobar" * 20, - } + 
"scope": "whatsapp_business_messaging", + "target_ids": [ + "111111111111111", + ], + }, ] } - ), + } + ), + ), + # getting waba details + MockResponse( + 200, + json.dumps( + { + "id": "111111111111111", + "currency": "USD", + "message_template_namespace": "namespace-uuid", + "on_behalf_of_business_info": {"id": "2222222222222"}, + } ), - # pre-process for post - MockResponse( - 200, - json.dumps( - { - "data": { - "scopes": [ - "business_management", - "whatsapp_business_management", - "whatsapp_business_messaging", - ] + ), + # getting waba phone numbers + MockResponse( + 200, + json.dumps( + { + "data": [ + { + "id": "123123123", + "display_phone_number": "1234", + "verified_name": "Long WABA name" + " foobar" * 20, } - } - ), + ] + } ), - # getting te credit line ID - MockResponse(200, json.dumps({"data": [{"id": "567567567"}]})), - # phone number verification status - MockResponse( - 200, - json.dumps( - { - "verified_name": "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", - "code_verification_status": "VERIFIED", - "display_phone_number": "1234", - "quality_rating": "GREEN", - "id": "123123123", + ), + # pre-process for post + MockResponse( + 200, + json.dumps( + { + "data": { + "scopes": [ + "business_management", + "whatsapp_business_management", + "whatsapp_business_messaging", + ], + "is_valid": True, } - ), + } ), - ] - - wa_cloud_post.return_value = MockResponse(200, json.dumps({"success": "true"})) - - response = self.client.get(claim_whatsapp_cloud_url, follow=True) - - self.assertEqual(len(response.context["phone_numbers"]), 1) - self.assertEqual(response.context["phone_numbers"][0]["waba_id"], "111111111111111") - self.assertEqual(response.context["phone_numbers"][0]["phone_number_id"], "123123123") - self.assertEqual(response.context["phone_numbers"][0]["business_id"], "2222222222222") - self.assertEqual(response.context["phone_numbers"][0]["currency"], "USD") - self.assertEqual( - response.context["phone_numbers"][0]["verified_name"], - "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", - ) - - post_data = response.context["form"].initial - post_data["number"] = "1234" - post_data["verified_name"] = ( - "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar" - ) - post_data["phone_number_id"] = "123123123" - post_data["waba_id"] = "111111111111111" - post_data["business_id"] = "2222222222222" - post_data["currency"] = "USD" - post_data["message_template_namespace"] = "namespace-uuid" - - response = self.client.post(claim_whatsapp_cloud_url, post_data, follow=True) - self.assertEqual(200, response.status_code) - - self.assertNotIn(WhatsAppType.SESSION_USER_TOKEN, self.client.session) - - self.assertEqual(3, wa_cloud_post.call_count) - - self.assertEqual( - "https://graph.facebook.com/v18.0/111111111111111/assigned_users", - wa_cloud_post.call_args_list[0][0][0], - ) - self.assertEqual( - {"Authorization": "Bearer WA_ADMIN_TOKEN"}, wa_cloud_post.call_args_list[0][1]["headers"] - ) - - self.assertEqual( - "https://graph.facebook.com/v18.0/111111111111111/subscribed_apps", - wa_cloud_post.call_args_list[1][0][0], - ) - - self.assertEqual( - "https://graph.facebook.com/v18.0/123123123/register", wa_cloud_post.call_args_list[2][0][0] - ) - self.assertEqual( - 
{"messaging_product": "whatsapp", "pin": "111111"}, wa_cloud_post.call_args_list[2][1]["data"] - ) - - channel = Channel.objects.get() - - self.assertEqual( - response.request["PATH_INFO"], - reverse("channels.channel_read", args=(channel.uuid,)), - ) - - self.assertEqual("1234 - Long WABA name foobar foobar foobar foobar foobar foob...", channel.name) - self.assertEqual("123123123", channel.address) - self.assertEqual("WAC", channel.channel_type) - - self.assertEqual("1234", channel.config["wa_number"]) - self.assertEqual( - "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", - channel.config["wa_verified_name"], - ) - self.assertEqual("111111111111111", channel.config["wa_waba_id"]) - self.assertEqual("USD", channel.config["wa_currency"]) - self.assertEqual("2222222222222", channel.config["wa_business_id"]) - self.assertEqual("111111", channel.config["wa_pin"]) - self.assertEqual("namespace-uuid", channel.config["wa_message_template_namespace"]) - self.assertEqual("WAC", channel.type.code) - self.assertEqual("whatsapp", channel.template_type.slug) - - response = self.client.get(reverse("channels.types.whatsapp.request_code", args=(channel.uuid,))) - self.assertEqual(200, response.status_code) - - response = self.client.get(reverse("channels.types.whatsapp.request_code", args=(channel.uuid,))) - self.assertEqual(200, response.status_code) - self.assertEqual(f"/settings/channels/{channel.uuid}", response.context[TEMBA_MENU_SELECTION]) - - # request verification code - response = self.client.post( - reverse("channels.types.whatsapp.request_code", args=(channel.uuid,)), dict(), follow=True - ) - self.assertEqual(200, response.status_code) - - self.assertEqual( - "https://graph.facebook.com/v18.0/123123123/request_code", wa_cloud_post.call_args[0][0] - ) - - # submit verification code - response = self.client.post( - reverse("channels.types.whatsapp.verify_code", args=(channel.uuid,)), - dict(code="000000"), - follow=True, - ) - self.assertEqual(200, response.status_code) - - self.assertEqual("https://graph.facebook.com/v18.0/123123123/register", wa_cloud_post.call_args[0][0]) - self.assertEqual({"messaging_product": "whatsapp", "pin": "111111"}, wa_cloud_post.call_args[1]["data"]) - - response = self.client.get(reverse("channels.types.whatsapp.verify_code", args=(channel.uuid,))) - self.assertEqual(f"/settings/channels/{channel.uuid}", response.context[TEMBA_MENU_SELECTION]) + ), + # getting te credit line ID + MockResponse(200, json.dumps({"data": [{"id": "567567567"}]})), + # phone number verification status + MockResponse( + 200, + json.dumps( + { + "verified_name": "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", + "code_verification_status": "VERIFIED", + "display_phone_number": "1234", + "quality_rating": "GREEN", + "id": "123123123", + } + ), + ), + ] + + wa_cloud_post.return_value = MockResponse(200, json.dumps({"success": "true"})) + + response = self.client.get(claim_whatsapp_cloud_url, follow=True) + + self.assertEqual(len(response.context["phone_numbers"]), 1) + self.assertEqual(response.context["phone_numbers"][0]["waba_id"], "111111111111111") + self.assertEqual(response.context["phone_numbers"][0]["phone_number_id"], "123123123") + self.assertEqual(response.context["phone_numbers"][0]["business_id"], "2222222222222") + 
self.assertEqual(response.context["phone_numbers"][0]["currency"], "USD") + self.assertEqual( + response.context["phone_numbers"][0]["verified_name"], + "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", + ) + + post_data = response.context["form"].initial + post_data["number"] = "1234" + post_data["verified_name"] = ( + "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar" + ) + post_data["phone_number_id"] = "123123123" + post_data["waba_id"] = "111111111111111" + post_data["business_id"] = "2222222222222" + post_data["currency"] = "USD" + post_data["message_template_namespace"] = "namespace-uuid" + + response = self.client.post(claim_whatsapp_cloud_url, post_data, follow=True) + self.assertEqual(200, response.status_code) + + self.assertNotIn(WhatsAppType.SESSION_USER_TOKEN, self.client.session) + + self.assertEqual(3, wa_cloud_post.call_count) + + self.assertEqual( + "https://graph.facebook.com/v18.0/111111111111111/assigned_users", + wa_cloud_post.call_args_list[0][0][0], + ) + self.assertEqual({"Authorization": "Bearer WA_ADMIN_TOKEN"}, wa_cloud_post.call_args_list[0][1]["headers"]) + + self.assertEqual( + "https://graph.facebook.com/v18.0/111111111111111/subscribed_apps", + wa_cloud_post.call_args_list[1][0][0], + ) + + self.assertEqual( + "https://graph.facebook.com/v18.0/123123123/register", wa_cloud_post.call_args_list[2][0][0] + ) + self.assertEqual( + {"messaging_product": "whatsapp", "pin": "111111"}, wa_cloud_post.call_args_list[2][1]["data"] + ) + + channel = Channel.objects.get() + + self.assertEqual( + response.request["PATH_INFO"], + reverse("channels.channel_read", args=(channel.uuid,)), + ) + + self.assertEqual("1234 - Long WABA name foobar foobar foobar foobar foobar foob...", channel.name) + self.assertEqual("123123123", channel.address) + self.assertEqual("WAC", channel.channel_type) + + self.assertEqual("1234", channel.config["wa_number"]) + self.assertEqual( + "Long WABA name foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar foobar", + channel.config["wa_verified_name"], + ) + self.assertEqual("111111111111111", channel.config["wa_waba_id"]) + self.assertEqual("USD", channel.config["wa_currency"]) + self.assertEqual("2222222222222", channel.config["wa_business_id"]) + self.assertEqual("111111", channel.config["wa_pin"]) + self.assertEqual("namespace-uuid", channel.config["wa_message_template_namespace"]) + self.assertEqual("WAC", channel.type.code) + self.assertEqual("whatsapp", channel.template_type.slug) + + response = self.client.get(reverse("channels.types.whatsapp.request_code", args=(channel.uuid,))) + self.assertEqual(200, response.status_code) + + response = self.client.get(reverse("channels.types.whatsapp.request_code", args=(channel.uuid,))) + self.assertEqual(200, response.status_code) + self.assertEqual(f"/settings/channels/{channel.uuid}", response.context[TEMBA_MENU_SELECTION]) + + # request verification code + response = self.client.post( + reverse("channels.types.whatsapp.request_code", args=(channel.uuid,)), dict(), follow=True + ) + self.assertEqual(200, response.status_code) + + self.assertEqual("https://graph.facebook.com/v18.0/123123123/request_code", wa_cloud_post.call_args[0][0]) + + # submit verification code + response = self.client.post( + 
reverse("channels.types.whatsapp.verify_code", args=(channel.uuid,)), + dict(code="000000"), + follow=True, + ) + self.assertEqual(200, response.status_code) + + self.assertEqual("https://graph.facebook.com/v18.0/123123123/register", wa_cloud_post.call_args[0][0]) + self.assertEqual({"messaging_product": "whatsapp", "pin": "111111"}, wa_cloud_post.call_args[1]["data"]) + + response = self.client.get(reverse("channels.types.whatsapp.verify_code", args=(channel.uuid,))) + self.assertEqual(f"/settings/channels/{channel.uuid}", response.context[TEMBA_MENU_SELECTION]) # make sure the token is set on the session session = self.client.session @@ -392,161 +398,154 @@ def test_claim(self, mock_randint): self.assertIn(WhatsAppType.SESSION_USER_TOKEN, self.client.session) - with patch("requests.get") as wa_cloud_get: - with patch("requests.post") as wa_cloud_post: - wa_cloud_get.side_effect = [ - # pre-process for get - MockResponse( - 200, - json.dumps( - { - "data": { - "scopes": [ - "business_management", - "whatsapp_business_management", - "whatsapp_business_messaging", - ] - } - } - ), - ), - # getting target waba - MockResponse( - 200, - json.dumps( - { - "data": { - "granular_scopes": [ - { - "scope": "business_management", - "target_ids": [ - "2222222222222", - ], - }, - { - "scope": "whatsapp_business_management", - "target_ids": [ - "111111111111111", - ], - }, - { - "scope": "whatsapp_business_messaging", - "target_ids": [ - "111111111111111", - ], - }, - ] - } - } - ), - ), - # getting waba details - MockResponse( - 200, - json.dumps( - { - "id": "111111111111111", - "currency": "USD", - "message_template_namespace": "namespace-uuid", - "owner_business_info": {"id": "2222222222222"}, + with patch("requests.get") as wa_cloud_get, patch("requests.post") as wa_cloud_post: + wa_cloud_get.side_effect = [ + # pre-process for get + MockResponse( + 200, + json.dumps( + { + "data": { + "scopes": [ + "business_management", + "whatsapp_business_management", + "whatsapp_business_messaging", + ], + "is_valid": True, } - ), + } ), - # getting waba phone numbers - MockResponse( - 200, - json.dumps( - { - "data": [ - {"id": "123123123", "display_phone_number": "1234", "verified_name": "WABA name"} + ), + # getting target waba + MockResponse( + 200, + json.dumps( + { + "data": { + "granular_scopes": [ + { + "scope": "business_management", + "target_ids": [ + "2222222222222", + ], + }, + { + "scope": "whatsapp_business_management", + "target_ids": [ + "111111111111111", + ], + }, + { + "scope": "whatsapp_business_messaging", + "target_ids": [ + "111111111111111", + ], + }, ] } - ), + } ), - # pre-process for post - MockResponse( - 200, - json.dumps( - { - "data": { - "scopes": [ - "business_management", - "whatsapp_business_management", - "whatsapp_business_messaging", - ] - } - } - ), + ), + # getting waba details + MockResponse( + 200, + json.dumps( + { + "id": "111111111111111", + "currency": "USD", + "message_template_namespace": "namespace-uuid", + "owner_business_info": {"id": "2222222222222"}, + } ), - # getting target waba - MockResponse( - 200, - json.dumps( - { - "data": { - "granular_scopes": [ - { - "scope": "business_management", - "target_ids": [ - "2222222222222", - ], - }, - { - "scope": "whatsapp_business_management", - "target_ids": [ - "111111111111111", - ], - }, - { - "scope": "whatsapp_business_messaging", - "target_ids": [ - "111111111111111", - ], - }, - ] - } - } - ), + ), + # getting waba phone numbers + MockResponse( + 200, + json.dumps( + {"data": [{"id": "123123123", 
"display_phone_number": "1234", "verified_name": "WABA name"}]} ), - # getting waba details - MockResponse( - 200, - json.dumps( - { - "id": "111111111111111", - "currency": "USD", - "message_template_namespace": "namespace-uuid", - "owner_business_info": {"id": "2222222222222"}, + ), + # pre-process for post + MockResponse( + 200, + json.dumps( + { + "data": { + "scopes": [ + "business_management", + "whatsapp_business_management", + "whatsapp_business_messaging", + ], + "is_valid": True, } - ), + } ), - # getting waba phone numbers - MockResponse( - 200, - json.dumps( - { - "data": [ - {"id": "123123123", "display_phone_number": "1234", "verified_name": "WABA name"} + ), + # getting target waba + MockResponse( + 200, + json.dumps( + { + "data": { + "granular_scopes": [ + { + "scope": "business_management", + "target_ids": [ + "2222222222222", + ], + }, + { + "scope": "whatsapp_business_management", + "target_ids": [ + "111111111111111", + ], + }, + { + "scope": "whatsapp_business_messaging", + "target_ids": [ + "111111111111111", + ], + }, ] } - ), + } + ), + ), + # getting waba details + MockResponse( + 200, + json.dumps( + { + "id": "111111111111111", + "currency": "USD", + "message_template_namespace": "namespace-uuid", + "owner_business_info": {"id": "2222222222222"}, + } ), - # getting te credit line ID - MockResponse(200, json.dumps({"data": [{"id": "567567567"}]})), - ] + ), + # getting waba phone numbers + MockResponse( + 200, + json.dumps( + {"data": [{"id": "123123123", "display_phone_number": "1234", "verified_name": "WABA name"}]} + ), + ), + # getting te credit line ID + MockResponse(200, json.dumps({"data": [{"id": "567567567"}]})), + ] - wa_cloud_post.return_value = MockResponse(200, json.dumps({"success": "true"})) + wa_cloud_post.return_value = MockResponse(200, json.dumps({"success": "true"})) - response = self.client.get(claim_whatsapp_cloud_url, follow=True) + response = self.client.get(claim_whatsapp_cloud_url, follow=True) - wa_cloud_get.reset_mock() + wa_cloud_get.reset_mock() - response = self.client.post(claim_whatsapp_cloud_url, post_data, follow=True) - self.assertEqual(200, response.status_code) - self.assertEqual( - response.context["form"].errors["__all__"][0], - "This channel is already connected in this workspace.", - ) + response = self.client.post(claim_whatsapp_cloud_url, post_data, follow=True) + self.assertEqual(200, response.status_code) + self.assertEqual( + response.context["form"].errors["__all__"][0], + "This channel is already connected in this workspace.", + ) def test_clear_session_token(self): Channel.objects.all().delete() diff --git a/temba/channels/types/whatsapp/type.py b/temba/channels/types/whatsapp/type.py index 3c864becd36..01b8b379169 100644 --- a/temba/channels/types/whatsapp/type.py +++ b/temba/channels/types/whatsapp/type.py @@ -29,6 +29,7 @@ class WhatsAppType(ChannelType): courier_url = r"^wac/receive" schemes = [URN.WHATSAPP_SCHEME] + async_activation = False template_type = "whatsapp" claim_blurb = _("If you have an enterprise WhatsApp account, you can connect it to communicate with your contacts") @@ -39,12 +40,14 @@ class WhatsAppType(ChannelType): def get_urls(self): return [ self.get_claim_url(), - re_path(r"^clear_session_token$", ClearSessionToken.as_view(channel_type=self), name="clear_session_token"), re_path( - r"^(?P[a-z0-9\-]+)/request_code$", RequestCode.as_view(channel_type=self), name="request_code" + r"^clear_session_token/$", ClearSessionToken.as_view(channel_type=self), name="clear_session_token" ), - 
diff --git a/temba/channels/types/whatsapp/type.py b/temba/channels/types/whatsapp/type.py
index 3c864becd36..01b8b379169 100644
--- a/temba/channels/types/whatsapp/type.py
+++ b/temba/channels/types/whatsapp/type.py
@@ -29,6 +29,7 @@ class WhatsAppType(ChannelType):
     courier_url = r"^wac/receive"
     schemes = [URN.WHATSAPP_SCHEME]
 
+    async_activation = False
     template_type = "whatsapp"
 
     claim_blurb = _("If you have an enterprise WhatsApp account, you can connect it to communicate with your contacts")
@@ -39,12 +40,14 @@ class WhatsAppType(ChannelType):
     def get_urls(self):
         return [
             self.get_claim_url(),
-            re_path(r"^clear_session_token$", ClearSessionToken.as_view(channel_type=self), name="clear_session_token"),
             re_path(
-                r"^(?P<uuid>[a-z0-9\-]+)/request_code$", RequestCode.as_view(channel_type=self), name="request_code"
+                r"^clear_session_token/$", ClearSessionToken.as_view(channel_type=self), name="clear_session_token"
             ),
-            re_path(r"^(?P<uuid>[a-z0-9\-]+)/verify_code$", VerifyCode.as_view(channel_type=self), name="verify_code"),
-            re_path(r"^connect$", Connect.as_view(channel_type=self), name="connect"),
+            re_path(
+                r"^(?P<uuid>[a-z0-9\-]+)/request_code/$", RequestCode.as_view(channel_type=self), name="request_code"
+            ),
+            re_path(r"^(?P<uuid>[a-z0-9\-]+)/verify_code/$", VerifyCode.as_view(channel_type=self), name="verify_code"),
+            re_path(r"^connect/$", Connect.as_view(channel_type=self), name="connect"),
         ]
 
     def activate(self, channel):
diff --git a/temba/channels/types/whatsapp/views.py b/temba/channels/types/whatsapp/views.py
index 2d667eae534..0f8aacf6838 100644
--- a/temba/channels/types/whatsapp/views.py
+++ b/temba/channels/types/whatsapp/views.py
@@ -10,10 +10,9 @@
 from django.utils.translation import gettext_lazy as _
 
 from temba.channels.views import ChannelTypeMixin
-from temba.orgs.views import ModalMixin, OrgObjPermsMixin, OrgPermsMixin
+from temba.orgs.views.mixins import OrgObjPermsMixin, OrgPermsMixin
 from temba.utils.fields import InputWidget
 from temba.utils.text import truncate
-from temba.utils.views import ContentMenuMixin
 
 from ...models import Channel
 from ...views import ClaimViewMixin
@@ -219,7 +218,7 @@ def render_to_response(self, context, **response_kwargs):
         return JsonResponse({})
 
 
-class RequestCode(ChannelTypeMixin, ModalMixin, ContentMenuMixin, OrgObjPermsMixin, SmartModelActionView):
+class RequestCode(ChannelTypeMixin, OrgObjPermsMixin, SmartModelActionView, SmartFormView):
     class Form(forms.Form):
         pass
 
@@ -240,11 +239,6 @@ def get_success_url(self):
     def derive_menu_path(self):
         return f"/settings/channels/{self.get_object().uuid}"
 
-    def build_content_menu(self, menu):
-        obj = self.get_object()
-
-        menu.add_link(_("Channel"), reverse("channels.channel_read", args=[obj.uuid]))
-
     def get_context_data(self, **kwargs):
         context = super().get_context_data(**kwargs)
         phone_number_url = f"https://graph.facebook.com/v18.0/{self.object.address}"
@@ -283,7 +277,7 @@ def execute_action(self):
         )
 
 
-class VerifyCode(ChannelTypeMixin, ModalMixin, ContentMenuMixin, OrgObjPermsMixin, SmartModelActionView):
+class VerifyCode(ChannelTypeMixin, OrgObjPermsMixin, SmartModelActionView, SmartFormView):
     class Form(forms.Form):
         code = forms.CharField(
             min_length=6, required=True, help_text=_("The 6-digits number verification code"), widget=InputWidget()
         )
@@ -298,11 +292,6 @@ class Form(forms.Form):
     title = _("Verify Number")
     submit_button_name = _("Verify Number")
 
-    def build_content_menu(self, menu):
-        obj = self.get_object()
-
-        menu.add_link(_("Channel"), reverse("channels.channel_read", args=[obj.uuid]))
-
     def get_queryset(self):
         return Channel.objects.filter(is_active=True, org=self.request.org, channel_type=self.channel_type.code)
 
@@ -345,6 +334,10 @@ class Connect(ChannelTypeMixin, OrgPermsMixin, SmartFormView):
     class WhatsappCloudConnectForm(forms.Form):
         user_access_token = forms.CharField(min_length=32, required=True)
 
+        def __init__(self, org, *args, **kwargs):
+            self.org = org
+            super().__init__(*args, **kwargs)
+
         def clean(self):
             try:
                 auth_token = self.cleaned_data.get("user_access_token", None)
@@ -352,38 +345,33 @@ def clean(self):
                 app_id = settings.FACEBOOK_APPLICATION_ID
                 app_secret = settings.FACEBOOK_APPLICATION_SECRET
 
-                url = "https://graph.facebook.com/v18.0/debug_token"
-                params = {"access_token": f"{app_id}|{app_secret}", "input_token": auth_token}
-
-                response = requests.get(url, params=params)
-                if response.status_code != 200:  # pragma: no cover
-                    auth_code = auth_token
-
+                if
settings.FACEBOOK_LOGIN_WHATSAPP_CONFIG_ID: token_request_data = { "client_id": app_id, "client_secret": app_secret, - "code": auth_code, + "code": auth_token, "grant_type": "authorization_code", "redirect_uri": "https://" - + self.derive_org().get_brand_domain() + + self.org.get_brand_domain() + reverse("channels.types.whatsapp.connect"), } token_url = "https://graph.facebook.com/v18.0/oauth/access_token" response = requests.post(token_url, json=token_request_data) response_json = response.json() + if int(response.status_code / 100) == 2: + auth_token = response_json["access_token"] - auth_token = response_json["access_token"] - - params = {"access_token": f"{app_id}|{app_secret}", "input_token": auth_token} - - response = requests.get(url, params=params) - if response.status_code == 200: - self.cleaned_data["user_access_token"] = auth_token - else: - raise Exception("Failed to debug user token") + url = "https://graph.facebook.com/v18.0/debug_token" + params = {"access_token": f"{app_id}|{app_secret}", "input_token": auth_token} + response = requests.get(url, params=params) response_json = response.json() + if response.status_code == 200: + self.cleaned_data["user_access_token"] = auth_token + else: + raise Exception("Failed to debug user token") + for perm in ["business_management", "whatsapp_business_management", "whatsapp_business_messaging"]: if perm not in response_json.get("data", dict()).get("scopes", []): raise Exception( @@ -394,7 +382,7 @@ def clean(self): return self.cleaned_data - permission = "channels.types.whatsapp.connect" + permission = "channels.channel_claim" form_class = WhatsappCloudConnectForm success_url = "@channels.types.whatsapp.claim" field_config = dict(api_key=dict(label=""), api_secret=dict(label="")) @@ -404,8 +392,8 @@ def clean(self): menu_path = "/settings/workspace" title = "Connect WhatsApp" - def has_org_perm(self, permission): - return self.get_user().is_beta # only beta users are allowed + def has_permission(self, request, *args, **kwargs) -> bool: + return super().has_permission(request, *args, **kwargs) and self.request.user.is_beta def pre_process(self, request, *args, **kwargs): session_token = self.request.session.get(self.channel_type.SESSION_USER_TOKEN, None) @@ -414,6 +402,11 @@ def pre_process(self, request, *args, **kwargs): return super().pre_process(request, *args, **kwargs) + def get_form_kwargs(self): + kwargs = super().get_form_kwargs() + kwargs["org"] = self.request.org + return kwargs + def form_valid(self, form): auth_token = form.cleaned_data["user_access_token"] diff --git a/temba/channels/types/whatsapp_legacy/tests.py b/temba/channels/types/whatsapp_legacy/tests.py index 9b550bfc384..ab96f46f6d1 100644 --- a/temba/channels/types/whatsapp_legacy/tests.py +++ b/temba/channels/types/whatsapp_legacy/tests.py @@ -46,7 +46,7 @@ def test_claim(self, mock_health, mock_socket_hostname): post_data["username"] = "temba" post_data["password"] = "tembapasswd" post_data["country"] = "RW" - post_data["base_url"] = "https://nyaruka.com/whatsapp" + post_data["base_url"] = "https://textit.com/whatsapp" post_data["facebook_namespace"] = "my-custom-app" post_data["facebook_business_id"] = "1234" post_data["facebook_access_token"] = "token123" @@ -102,7 +102,7 @@ def test_claim(self, mock_health, mock_socket_hostname): self.assertEqual("temba", channel.config[Channel.CONFIG_USERNAME]) self.assertEqual("tembapasswd", channel.config[Channel.CONFIG_PASSWORD]) self.assertEqual("abc123", channel.config[Channel.CONFIG_AUTH_TOKEN]) - 
self.assertEqual("https://nyaruka.com/whatsapp", channel.config[Channel.CONFIG_BASE_URL]) + self.assertEqual("https://textit.com/whatsapp", channel.config[Channel.CONFIG_BASE_URL]) self.assertNotIn(CONFIG_FB_TEMPLATE_API_VERSION, channel.config) self.assertEqual("+250788123123", channel.address) @@ -198,7 +198,7 @@ def test_duplicate_number_channels(self, mock_health, mock_socket_hostname): post_data["username"] = "temba" post_data["password"] = "tembapasswd" post_data["country"] = "RW" - post_data["base_url"] = "https://nyaruka.com/whatsapp" + post_data["base_url"] = "https://textit.com/whatsapp" post_data["facebook_namespace"] = "my-custom-app" post_data["facebook_business_id"] = "1234" post_data["facebook_access_token"] = "token123" @@ -258,7 +258,7 @@ def test_refresh_tokens(self): "WhatsApp: 1234", "1234", config={ - Channel.CONFIG_BASE_URL: "https://nyaruka.com/whatsapp", + Channel.CONFIG_BASE_URL: "https://textit.com/whatsapp", Channel.CONFIG_USERNAME: "temba", Channel.CONFIG_PASSWORD: "tembapasswd", Channel.CONFIG_AUTH_TOKEN: "authtoken123", @@ -274,7 +274,7 @@ def test_refresh_tokens(self): "WhatsApp: 1235", "1235", config={ - Channel.CONFIG_BASE_URL: "https://nyaruka.com/whatsapp", + Channel.CONFIG_BASE_URL: "https://textit.com/whatsapp", Channel.CONFIG_USERNAME: "temba", Channel.CONFIG_PASSWORD: "tembapasswd", Channel.CONFIG_AUTH_TOKEN: "authtoken123", @@ -387,7 +387,7 @@ def test_claim_self_hosted_templates(self, mock_health, mock_socket_hostname): post_data["username"] = "temba" post_data["password"] = "tembapasswd" post_data["country"] = "RW" - post_data["base_url"] = "https://nyaruka.com/whatsapp" + post_data["base_url"] = "https://textit.com/whatsapp" post_data["facebook_namespace"] = "my-custom-app" post_data["facebook_business_id"] = "1234" post_data["facebook_access_token"] = "token123" @@ -427,7 +427,7 @@ def test_claim_self_hosted_templates(self, mock_health, mock_socket_hostname): self.assertEqual("temba", channel.config[Channel.CONFIG_USERNAME]) self.assertEqual("tembapasswd", channel.config[Channel.CONFIG_PASSWORD]) self.assertEqual("abc123", channel.config[Channel.CONFIG_AUTH_TOKEN]) - self.assertEqual("https://nyaruka.com/whatsapp", channel.config[Channel.CONFIG_BASE_URL]) + self.assertEqual("https://textit.com/whatsapp", channel.config[Channel.CONFIG_BASE_URL]) self.assertEqual("v3.3", channel.config[CONFIG_FB_TEMPLATE_API_VERSION]) self.assertEqual("+250788123123", channel.address) @@ -442,7 +442,7 @@ def test_fetch_templates(self, mock_get): "WhatsApp: 1234", "1234", config={ - Channel.CONFIG_BASE_URL: "https://nyaruka.com/whatsapp", + Channel.CONFIG_BASE_URL: "https://textit.com/whatsapp", Channel.CONFIG_USERNAME: "temba", Channel.CONFIG_PASSWORD: "tembapasswd", Channel.CONFIG_AUTH_TOKEN: "authtoken123", @@ -513,7 +513,7 @@ def test_check_health(self): "WhatsApp: 1234", "1234", config={ - Channel.CONFIG_BASE_URL: "https://nyaruka.com/whatsapp", + Channel.CONFIG_BASE_URL: "https://textit.com/whatsapp", Channel.CONFIG_USERNAME: "temba", Channel.CONFIG_PASSWORD: "tembapasswd", Channel.CONFIG_AUTH_TOKEN: "authtoken123", @@ -531,12 +531,23 @@ def test_check_health(self): MockResponse(401, ""), ] - with self.assertRaises(Exception): + with patch("logging.Logger.debug") as mock_log_debug: channel.type.check_health(channel) + self.assertEqual(1, mock_log_debug.call_count) + self.assertEqual( + "Could not establish a connection with the WhatsApp server: Network is unreachable", + mock_log_debug.call_args[0][0], + ) channel.type.check_health(channel) 
        mock_get.assert_called_with(
-            "https://nyaruka.com/whatsapp/v1/health", headers={"Authorization": "Bearer authtoken123"}
+            "https://textit.com/whatsapp/v1/health", headers={"Authorization": "Bearer authtoken123"}
         )
-        with self.assertRaises(Exception):
+
+        with patch("logging.Logger.debug") as mock_log_debug:
             channel.type.check_health(channel)
+            self.assertEqual(1, mock_log_debug.call_count)
+            self.assertEqual(
+                "Error checking API health: b''",
+                mock_log_debug.call_args[0][0],
+            )
diff --git a/temba/channels/types/whatsapp_legacy/type.py b/temba/channels/types/whatsapp_legacy/type.py
index 577ba145a9a..9d1ebda9df4 100644
--- a/temba/channels/types/whatsapp_legacy/type.py
+++ b/temba/channels/types/whatsapp_legacy/type.py
@@ -1,4 +1,5 @@
 import base64
+import logging
 
 import requests
 
@@ -24,6 +25,8 @@
 TEMPLATE_LIST_URL = "https://%s/%s/%s/message_templates"
 
+logger = logging.getLogger(__name__)
+
 
 class WhatsAppLegacyType(ChannelType):
     """
@@ -49,7 +52,7 @@ class WhatsAppLegacyType(ChannelType):
     def get_urls(self):
         return [
             self.get_claim_url(),
-            re_path(r"^(?P<uuid>[a-z0-9\-]+)/refresh$", RefreshView.as_view(channel_type=self), name="refresh"),
+            re_path(r"^(?P<uuid>[a-z0-9\-]+)/refresh/$", RefreshView.as_view(channel_type=self), name="refresh"),
         ]
 
     def get_api_headers(self, channel):
@@ -113,14 +116,23 @@ def fetch_templates(self, channel) -> list:
 
     def check_health(self, channel):
         headers = self.get_api_headers(channel)
+        start = timezone.now()
 
         try:
             response = requests.get(channel.config[Channel.CONFIG_BASE_URL] + "/v1/health", headers=headers)
         except Exception as ex:
-            raise Exception(f"Could not establish a connection with the WhatsApp server: {ex}")
+            logger.debug(f"Could not establish a connection with the WhatsApp server: {ex}")
+            return
 
         if response.status_code >= 400:
-            raise requests.RequestException(f"Error checking API health: {response.content}", response=response)
+            HTTPLog.from_exception(
+                HTTPLog.WHATSAPP_CHECK_HEALTH,
+                requests.RequestException(f"Error checking API health: {response.content}", response=response),
+                start,
+                channel=channel,
+            )
+            logger.debug(f"Error checking API health: {response.content}")
+            return
 
         return response
diff --git a/temba/channels/urls.py b/temba/channels/urls.py
index 59255aca2c6..eb226e67e43 100644
--- a/temba/channels/urls.py
+++ b/temba/channels/urls.py
@@ -3,6 +3,7 @@
 
 from temba.utils.views import CourierURLHandler
 
+from .android.views import register, sync
 from .models import Channel
 from .views import ChannelCRUDL, ChannelLogCRUDL
 
@@ -29,4 +30,6 @@
     re_path(r"^channels/", include(ChannelCRUDL().as_urlpatterns() + ChannelLogCRUDL().as_urlpatterns())),
     re_path(r"^c/", include(courier_urls)),
     re_path(r"^channels/types/", include(type_urls)),
+    re_path(r"^relayers/relayer/sync/(\d+)/$", sync, {}, "sync"),
+    re_path(r"^relayers/relayer/register/$", register, {}, "register"),
 ]
diff --git a/temba/channels/views.py b/temba/channels/views.py
index c738484458f..3a1373855b0 100644
--- a/temba/channels/views.py
+++ b/temba/channels/views.py
@@ -3,10 +3,10 @@
 from datetime import timedelta
 from typing import Any
 
-import nexmo
 import phonenumbers
 import requests
 import twilio.base.exceptions
+import vonage
 from smartmin.views import (
     SmartCRUDL,
     SmartFormView,
@@ -35,12 +35,13 @@
 from temba.ivr.models import Call
 from temba.msgs.models import Msg
 from temba.notifications.views import NotificationTargetMixin
-from temba.orgs.views import DependencyDeleteModal, ModalMixin, OrgObjPermsMixin, OrgPermsMixin
+from temba.orgs.views.base import
BaseDependencyDeleteModal, BaseReadView +from temba.orgs.views.mixins import OrgObjPermsMixin, OrgPermsMixin from temba.utils import countries from temba.utils.fields import SelectWidget from temba.utils.json import EpochEncoder from temba.utils.models import patch_queryset_count -from temba.utils.views import ComponentFormMixin, ContentMenuMixin, SpaMixin +from temba.utils.views.mixins import ComponentFormMixin, ContextMenuMixin, ModalFormMixin, SpaMixin from .models import Channel, ChannelCount, ChannelLog @@ -374,8 +375,8 @@ def form_valid(self, form, *args, **kwargs): return HttpResponseRedirect("%s?success" % reverse("public.public_welcome")) except ( - nexmo.AuthenticationError, - nexmo.ClientError, + vonage.AuthenticationError, + vonage.ClientError, twilio.base.exceptions.TwilioRestException, ) as e: # pragma: no cover logger.warning(f"Unable to claim a number: {str(e)}", exc_info=True) @@ -472,20 +473,17 @@ class ChannelCRUDL(SmartCRUDL): "facebook_whitelist", ) - class Read(SpaMixin, OrgObjPermsMixin, ContentMenuMixin, NotificationTargetMixin, SmartReadView): + class Read(SpaMixin, ContextMenuMixin, NotificationTargetMixin, BaseReadView): slug_url_kwarg = "uuid" exclude = ("id", "is_active", "created_by", "modified_by", "modified_on") def derive_menu_path(self): - return f"/settings/channels/{self.get_object().uuid}" - - def get_queryset(self): - return Channel.objects.filter(is_active=True) + return f"/settings/channels/{self.object.uuid}" def get_notification_scope(self) -> tuple: return "incident:started", str(self.object.id) - def build_content_menu(self, menu): + def build_context_menu(self, menu): obj = self.get_object() for item in obj.type.menu_items: @@ -596,13 +594,10 @@ def get_context_data(self, **kwargs): return context - class Chart(OrgObjPermsMixin, SmartReadView): + class Chart(BaseReadView): permission = "channels.channel_read" slug_url_kwarg = "uuid" - def get_queryset(self): - return Channel.objects.filter(is_active=True) - def render_to_response(self, context, **response_kwargs): channel = self.object @@ -662,7 +657,7 @@ def render_to_response(self, context, **response_kwargs): encoder=EpochEncoder, ) - class FacebookWhitelist(ComponentFormMixin, ModalMixin, OrgObjPermsMixin, SmartModelActionView): + class FacebookWhitelist(ComponentFormMixin, ModalFormMixin, OrgObjPermsMixin, SmartModelActionView): class DomainForm(forms.Form): whitelisted_domain = forms.URLField( required=True, @@ -698,7 +693,7 @@ def execute_action(self): default_error = dict(message=_("An error occured contacting the Facebook API")) raise ValidationError(response_json.get("error", default_error)["message"]) - class Delete(DependencyDeleteModal, SpaMixin): + class Delete(BaseDependencyDeleteModal): cancel_url = "uuid@channels.channel_read" success_url = "@orgs.org_workspace" success_message = _("Your channel has been removed.") @@ -719,7 +714,7 @@ def post(self, request, *args, **kwargs): ) response = HttpResponse() - response["Temba-Success"] = self.cancel_url + response["X-Temba-Success"] = self.cancel_url return response # override success message for Twilio channels @@ -729,10 +724,10 @@ def post(self, request, *args, **kwargs): messages.info(request, self.success_message) response = HttpResponse() - response["Temba-Success"] = self.get_success_url() + response["X-Temba-Success"] = self.get_success_url() return response - class Update(OrgObjPermsMixin, ComponentFormMixin, ModalMixin, SmartUpdateView): + class Update(ComponentFormMixin, ModalFormMixin, OrgObjPermsMixin, SmartUpdateView): 
def derive_title(self): return _("%s Channel") % self.object.type.name @@ -812,7 +807,7 @@ def channel_types_groups(self): return recommended_channels, types_by_category, False - class Configuration(SpaMixin, OrgObjPermsMixin, SmartReadView): + class Configuration(SpaMixin, BaseReadView): slug_url_kwarg = "uuid" def pre_process(self, *args, **kwargs): @@ -932,9 +927,7 @@ def get_context_data(self, **kwargs): anonymize = self.request.org.is_anon and not (self.request.GET.get("break") and self.request.user.is_staff) logs = [] for log in self.owner.get_logs(): - logs.append( - ChannelLog.display(log, anonymize=anonymize, channel=self.owner.channel, urn=self.owner.contact_urn) - ) + logs.append(log.get_display(anonymize=anonymize, urn=self.owner.contact_urn)) context["logs"] = logs return context diff --git a/temba/classifiers/tests/__init__.py b/temba/classifiers/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/temba/classifiers/tests/test_classifier.py b/temba/classifiers/tests/test_classifier.py new file mode 100644 index 00000000000..38c96d5d254 --- /dev/null +++ b/temba/classifiers/tests/test_classifier.py @@ -0,0 +1,72 @@ +from unittest.mock import patch + +from django.utils import timezone + +from temba.classifiers.models import Classifier +from temba.classifiers.types.wit import WitType +from temba.request_logs.models import HTTPLog +from temba.tests import MockResponse, TembaTest + +INTENT_RESPONSE = """ +[ + { + "id": "754569408690533", + "name": "book_car" + }, + { + "id": "754569408690020", + "name": "book_horse" + }, + { + "id": "754569408690131", + "name": "book_hotel" + } +] +""" + + +class ClassifierTest(TembaTest): + def setUp(self): + super().setUp() + + # create some classifiers + self.c1 = Classifier.create(self.org, self.admin, WitType.slug, "Booker", {}, sync=False) + self.c1.intents.create(name="book_flight", external_id="book_flight", created_on=timezone.now(), is_active=True) + self.c1.intents.create( + name="book_hotel", external_id="754569408690131", created_on=timezone.now(), is_active=False + ) + self.c1.intents.create( + name="book_car", external_id="754569408690533", created_on=timezone.now(), is_active=True + ) + + def test_syncing(self): + # will fail due to missing keys + self.c1.async_sync() + + # no intents should have been changed / removed as this was an error + self.assertEqual(2, self.c1.active_intents().count()) + + # ok, fix our config + self.c1.config = {WitType.CONFIG_ACCESS_TOKEN: "sesasme", WitType.CONFIG_APP_ID: "1234"} + self.c1.save() + + # try again + with patch("requests.get") as mock_get: + mock_get.return_value = MockResponse(200, INTENT_RESPONSE) + self.c1.async_sync() + + # should have three active intents + intents = self.c1.active_intents() + self.assertEqual(3, intents.count()) + self.assertEqual("book_car", intents[0].name) + self.assertEqual("754569408690533", intents[0].external_id) + self.assertEqual("book_horse", intents[1].name) + self.assertEqual("754569408690020", intents[1].external_id) + self.assertEqual("book_hotel", intents[2].name) + self.assertEqual("754569408690131", intents[2].external_id) + + # one inactive + self.assertEqual(1, self.c1.intents.filter(is_active=False).count()) + + # one classifier log + self.assertEqual(1, HTTPLog.objects.filter(classifier=self.c1, org=self.org).count()) diff --git a/temba/classifiers/tests.py b/temba/classifiers/tests/test_classifiercrudl.py similarity index 63% rename from temba/classifiers/tests.py rename to 
temba/classifiers/tests/test_classifiercrudl.py index e8245e819c8..621f0aac832 100644 --- a/temba/classifiers/tests.py +++ b/temba/classifiers/tests/test_classifiercrudl.py @@ -3,77 +3,11 @@ from django.urls import reverse from django.utils import timezone -from temba.request_logs.models import HTTPLog -from temba.tests import CRUDLTestMixin, MockResponse, TembaTest -from temba.utils.views import TEMBA_MENU_SELECTION - -from .models import Classifier -from .types.luis import LuisType -from .types.wit import WitType - -INTENT_RESPONSE = """ -[ - { - "id": "754569408690533", - "name": "book_car" - }, - { - "id": "754569408690020", - "name": "book_horse" - }, - { - "id": "754569408690131", - "name": "book_hotel" - } -] -""" - - -class ClassifierTest(TembaTest): - def setUp(self): - super().setUp() - - # create some classifiers - self.c1 = Classifier.create(self.org, self.admin, WitType.slug, "Booker", {}, sync=False) - self.c1.intents.create(name="book_flight", external_id="book_flight", created_on=timezone.now(), is_active=True) - self.c1.intents.create( - name="book_hotel", external_id="754569408690131", created_on=timezone.now(), is_active=False - ) - self.c1.intents.create( - name="book_car", external_id="754569408690533", created_on=timezone.now(), is_active=True - ) - - def test_syncing(self): - # will fail due to missing keys - self.c1.async_sync() - - # no intents should have been changed / removed as this was an error - self.assertEqual(2, self.c1.active_intents().count()) - - # ok, fix our config - self.c1.config = {WitType.CONFIG_ACCESS_TOKEN: "sesasme", WitType.CONFIG_APP_ID: "1234"} - self.c1.save() - - # try again - with patch("requests.get") as mock_get: - mock_get.return_value = MockResponse(200, INTENT_RESPONSE) - self.c1.async_sync() - - # should have three active intents - intents = self.c1.active_intents() - self.assertEqual(3, intents.count()) - self.assertEqual("book_car", intents[0].name) - self.assertEqual("754569408690533", intents[0].external_id) - self.assertEqual("book_horse", intents[1].name) - self.assertEqual("754569408690020", intents[1].external_id) - self.assertEqual("book_hotel", intents[2].name) - self.assertEqual("754569408690131", intents[2].external_id) - - # one inactive - self.assertEqual(1, self.c1.intents.filter(is_active=False).count()) - - # one classifier log - self.assertEqual(1, HTTPLog.objects.filter(classifier=self.c1, org=self.org).count()) +from temba.classifiers.models import Classifier +from temba.classifiers.types.luis import LuisType +from temba.classifiers.types.wit import WitType +from temba.tests import CRUDLTestMixin, TembaTest +from temba.utils.views.mixins import TEMBA_MENU_SELECTION class ClassifierCRUDLTest(TembaTest, CRUDLTestMixin): @@ -164,7 +98,7 @@ def test_delete(self): self.assertContains(response, "You are about to delete") response = self.assertDeleteSubmit(delete_url, self.admin, object_deactivated=self.c2, success_status=200) - self.assertEqual("/org/workspace/", response["Temba-Success"]) + self.assertEqual("/org/workspace/", response["X-Temba-Success"]) # should see warning if global is being used delete_url = reverse("classifiers.classifier_delete", args=[self.c1.uuid]) @@ -176,7 +110,7 @@ def test_delete(self): self.assertContains(response, "Color Flow") response = self.assertDeleteSubmit(delete_url, self.admin, object_deactivated=self.c1, success_status=200) - self.assertEqual("/org/workspace/", response["Temba-Success"]) + self.assertEqual("/org/workspace/", response["X-Temba-Success"]) 
self.flow.refresh_from_db() self.assertTrue(self.flow.has_issues) diff --git a/temba/classifiers/views.py b/temba/classifiers/views.py index 63ad01c6b37..376e3ebbf4f 100644 --- a/temba/classifiers/views.py +++ b/temba/classifiers/views.py @@ -1,12 +1,13 @@ -from smartmin.views import SmartCRUDL, SmartFormView, SmartReadView, SmartTemplateView, SmartUpdateView +from smartmin.views import SmartCRUDL, SmartFormView, SmartTemplateView, SmartUpdateView from django.contrib import messages from django.http import HttpResponseRedirect from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from temba.orgs.views import DependencyDeleteModal, OrgObjPermsMixin, OrgPermsMixin -from temba.utils.views import ComponentFormMixin, ContentMenuMixin, SpaMixin +from temba.orgs.views.base import BaseDependencyDeleteModal, BaseReadView +from temba.orgs.views.mixins import OrgObjPermsMixin, OrgPermsMixin +from temba.utils.views.mixins import ComponentFormMixin, ContextMenuMixin, SpaMixin from .models import Classifier @@ -42,19 +43,19 @@ class ClassifierCRUDL(SmartCRUDL): model = Classifier actions = ("read", "connect", "delete", "sync") - class Delete(DependencyDeleteModal): + class Delete(BaseDependencyDeleteModal): cancel_url = "uuid@classifiers.classifier_read" success_url = "@orgs.org_workspace" success_message = _("Your classifier has been deleted.") - class Read(SpaMixin, OrgObjPermsMixin, ContentMenuMixin, SmartReadView): + class Read(SpaMixin, ContextMenuMixin, BaseReadView): slug_url_kwarg = "uuid" exclude = ("id", "is_active", "created_by", "modified_by", "modified_on") def derive_menu_path(self): - return f"/settings/classifiers/{self.get_object().uuid}" + return f"/settings/classifiers/{self.object.uuid}" - def build_content_menu(self, menu): + def build_context_menu(self, menu): obj = self.get_object() menu.add_link(_("Log"), reverse("request_logs.httplog_classifier", args=[obj.uuid])) @@ -70,10 +71,6 @@ def build_content_menu(self, menu): title=_("Delete Classifier"), ) - def get_queryset(self, **kwargs): - queryset = super().get_queryset(**kwargs) - return queryset.filter(is_active=True) - class Sync(SpaMixin, OrgObjPermsMixin, SmartUpdateView): fields = () success_url = "uuid@classifiers.classifier_read" diff --git a/temba/contacts/forms.py b/temba/contacts/forms.py index 139042775e2..7b451d110e4 100644 --- a/temba/contacts/forms.py +++ b/temba/contacts/forms.py @@ -186,7 +186,7 @@ def clean_query(self): and self.instance.status != ContactGroup.STATUS_READY and parsed.query != self.instance.query ): - raise forms.ValidationError(_("You cannot update the query of a group that is evaluating.")) + raise forms.ValidationError(_("You cannot update the query of a group that is populating.")) return parsed.query diff --git a/temba/contacts/management/commands/reeval_group.py b/temba/contacts/management/commands/reeval_group.py deleted file mode 100644 index e8ae79b4a19..00000000000 --- a/temba/contacts/management/commands/reeval_group.py +++ /dev/null @@ -1,40 +0,0 @@ -import time - -from django.core.management.base import BaseCommand, CommandError - -from temba.contacts.models import ContactGroup -from temba.mailroom import queue_populate_dynamic_group - - -class Command(BaseCommand): - help = "Re-evaluates a smart group" - - def add_arguments(self, parser): - parser.add_argument("group_uuid", help="UUID of contact group to re-evaluate.") - - def handle(self, group_uuid: str, *args, **kwargs): - group = ContactGroup.objects.filter(uuid=group_uuid, 
group_type=ContactGroup.TYPE_SMART).first() - if not group: - raise CommandError("no such group") - - self.stdout.write( - f"Queueing re-evaluation for group {group.name} with query '{group.query}' " - f"and {group.get_member_count()} members..." - ) - - # mark group as evaluating - group.status = ContactGroup.STATUS_EVALUATING - group.save(update_fields=("status",)) - - queue_populate_dynamic_group(group) - - while True: - time.sleep(2) - - group.refresh_from_db() - if group.status == ContactGroup.STATUS_READY: - break - - self.stdout.write(f" > {group.get_member_count()} members...") - - self.stdout.write(f"Re-evaluation complete with {group.get_member_count()} members.") diff --git a/temba/contacts/migrations/0189_backfill_proxy_fields.py b/temba/contacts/migrations/0189_backfill_proxy_fields.py index 92934ccdfc5..8700863b140 100644 --- a/temba/contacts/migrations/0189_backfill_proxy_fields.py +++ b/temba/contacts/migrations/0189_backfill_proxy_fields.py @@ -3,7 +3,7 @@ from django.db import migrations -def backfill_proxy_fields(apps, schema_editor): +def backfill_proxy_fields(apps, schema_editor): # pragma: no cover ContactField = apps.get_model("contacts", "ContactField") # delete all old system fields that weren't usable @@ -17,12 +17,8 @@ def backfill_proxy_fields(apps, schema_editor): ContactField.objects.filter(is_system=True, key__in=("created_on", "last_seen_on")).update(is_proxy=True) -def reverse(apps, schema_editor): - pass - - class Migration(migrations.Migration): dependencies = [("contacts", "0188_contactfield_is_proxy_alter_contactfield_is_system")] - operations = [migrations.RunPython(backfill_proxy_fields, reverse)] + operations = [migrations.RunPython(backfill_proxy_fields, migrations.RunPython.noop)] diff --git a/temba/contacts/migrations/0193_fix_status_group_names.py b/temba/contacts/migrations/0193_fix_status_group_names.py new file mode 100644 index 00000000000..e2c80f36aea --- /dev/null +++ b/temba/contacts/migrations/0193_fix_status_group_names.py @@ -0,0 +1,32 @@ +# Generated by Django 5.1 on 2024-10-09 19:41 + +from django.db import migrations +from django.db.models import F, Value +from django.db.models.functions import Concat + + +def fix_status_group_names(apps, schema_editor): # pragma: no cover + ContactGroup = apps.get_model("contacts", "ContactGroup") + num_updated = 0 + + while True: + id_batch = list( + ContactGroup.objects.filter(group_type__in=("A", "B", "S", "V")) + .exclude(name__startswith="\\") + .values_list("id", flat=True)[:1000] + ) + if not id_batch: + break + + ContactGroup.objects.filter(id__in=id_batch).update(name=Concat(Value("\\"), F("name"))) + num_updated += len(id_batch) + + if num_updated: + print(f"Updated {num_updated} status group names") + + +class Migration(migrations.Migration): + + dependencies = [("contacts", "0192_alter_contactnote_text")] + + operations = [migrations.RunPython(fix_status_group_names, migrations.RunPython.noop)] diff --git a/temba/contacts/migrations/0194_alter_contactgroupcount_count.py b/temba/contacts/migrations/0194_alter_contactgroupcount_count.py new file mode 100644 index 00000000000..7d7266bf067 --- /dev/null +++ b/temba/contacts/migrations/0194_alter_contactgroupcount_count.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.2 on 2024-12-04 18:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("contacts", "0193_fix_status_group_names"), + ] + + operations = [ + migrations.AlterField( + model_name="contactgroupcount", + name="count", + 
field=models.IntegerField(), + ), + ] diff --git a/temba/contacts/migrations/0195_update_triggers.py b/temba/contacts/migrations/0195_update_triggers.py new file mode 100644 index 00000000000..5446ae51997 --- /dev/null +++ b/temba/contacts/migrations/0195_update_triggers.py @@ -0,0 +1,52 @@ +# Generated by Django 5.1.2 on 2024-12-05 16:19 + +from django.db import migrations + +SQL = """ +---------------------------------------------------------------------- +-- Handles DELETE statements on contacts_contactgroup_contacts table +---------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION temba_group_contacts_on_delete() RETURNS TRIGGER AS $$ +BEGIN + -- add negative count for all deleted rows + INSERT INTO contacts_contactgroupcount("group_id", "count", "is_squashed") + SELECT o.contactgroup_id, -count(*), FALSE FROM oldtab o GROUP BY 1; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +---------------------------------------------------------------------- +-- Handles INSERT statements on contacts_contactgroup_contacts table +---------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION temba_group_contacts_on_insert() RETURNS TRIGGER AS $$ +BEGIN + -- add positive count for all new rows + INSERT INTO contacts_contactgroupcount("group_id", "count", "is_squashed") + SELECT n.contactgroup_id, count(*), FALSE FROM newtab n GROUP BY 1; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER temba_group_contacts_on_delete +AFTER DELETE ON contacts_contactgroup_contacts REFERENCING OLD TABLE AS oldtab +FOR EACH STATEMENT EXECUTE PROCEDURE temba_group_contacts_on_delete(); + +CREATE TRIGGER temba_group_contacts_on_insert +AFTER INSERT ON contacts_contactgroup_contacts REFERENCING NEW TABLE AS newtab +FOR EACH STATEMENT EXECUTE PROCEDURE temba_group_contacts_on_insert(); + +DROP TRIGGER when_contact_groups_changed_then_update_count_trg ON contacts_contactgroup_contacts; +DROP FUNCTION update_group_count(); +""" + + +class Migration(migrations.Migration): + + dependencies = [ + ("contacts", "0194_alter_contactgroupcount_count"), + ("sql", "0006_squashed"), + ] + + operations = [migrations.RunSQL(SQL)] diff --git a/temba/contacts/models.py b/temba/contacts/models.py index 894184d83bd..38ac418e3eb 100644 --- a/temba/contacts/models.py +++ b/temba/contacts/models.py @@ -1,3 +1,4 @@ +import itertools import logging from datetime import date, datetime, timedelta, timezone as tzone from decimal import Decimal @@ -26,9 +27,10 @@ from temba.locations.models import AdminBoundary from temba.mailroom import ContactSpec, modifiers, queue_populate_dynamic_group from temba.orgs.models import DependencyMixin, Export, ExportType, Org, OrgRole, User -from temba.utils import chunk_list, format_number, on_transaction_commit +from temba.utils import format_number, on_transaction_commit from temba.utils.export import MultiSheetExporter -from temba.utils.models import JSONField, LegacyUUIDMixin, SquashableModel, TembaModel, delete_in_batches +from temba.utils.models import JSONField, LegacyUUIDMixin, TembaModel, delete_in_batches +from temba.utils.models.counts import BaseSquashableCount from temba.utils.text import unsnakify from temba.utils.urns import ParsedURN, parse_number, parse_urn from temba.utils.uuid import uuid4 @@ -495,13 +497,16 @@ def get_or_create(cls, org, user, key: str, name: str = None, value_type=None): ) @classmethod - def get_fields(cls, org: Org, viewable_by=None): + def get_fields(cls, org: Org, featured=None, 
viewable_by=None): """ Gets the fields for the given org """ fields = org.fields.filter(is_active=True, is_proxy=False) + if featured is not None: + fields = fields.filter(show_in_table=featured) + if viewable_by and org.get_user_role(viewable_by) == OrgRole.AGENT: fields = fields.exclude(agent_access=cls.ACCESS_NONE) @@ -639,7 +644,7 @@ def get_status_counts(cls, org) -> dict: Returns the counts for each contact status for the given org """ groups = org.groups.filter(group_type__in=ContactGroup.CONTACT_STATUS_TYPES) - return {g.group_type: count for g, count in ContactGroupCount.get_totals(groups).items()} + return {g.group_type: count for g, count in ContactGroup.get_member_counts(groups).items()} def get_scheduled_broadcasts(self): from temba.msgs.models import SystemLabel @@ -836,7 +841,7 @@ def get_field_serialized(self, field) -> str: return value_dict.get(engine_type) - def get_field_value(self, field): + def get_field_value(self, field: ContactField): """ Given the passed in contact field object, returns the value (as a string, decimal, datetime, AdminBoundary) for this contact or None. @@ -858,7 +863,7 @@ def get_field_value(self, field): elif field.value_type in [ContactField.TYPE_STATE, ContactField.TYPE_DISTRICT, ContactField.TYPE_WARD]: return AdminBoundary.get_by_path(self.org, string_value) - def get_field_display(self, field): + def get_field_display(self, field: ContactField) -> str: """ Returns the display value for the passed in field, or empty string if None """ @@ -1057,13 +1062,17 @@ def restore(self, user): Contact.bulk_change_status(user, [self], modifiers.Status.ACTIVE) self.refresh_from_db() - def release(self, user, *, immediately=False): + def release(self, user, *, immediately=False, deindex=True): """ Releases this contact. Note that we clear all identifying data but don't hard delete the contact because we need to expose deleted contacts over the API to allow external systems to know that contacts have been deleted. """ from .tasks import full_release_contact + # do de-indexing first so if it fails for some reason, we don't go through with the delete + if deindex: + mailroom.get_client().contact_deindex(self.org, [self]) + with transaction.atomic(): # prep our urns for deletion so our old path creates a new urn for urn in self.urns.all(): @@ -1152,7 +1161,7 @@ def _full_release(self): broadcast.contacts.remove(self) @classmethod - def bulk_urn_cache_initialize(cls, contacts, *, using="default"): + def bulk_urn_cache_initialize(cls, contacts, *, using: str = "default"): """ Initializes the URN caches on the given contacts. 
""" @@ -1426,7 +1435,7 @@ def create_system_groups(cls, org): assert not org.groups.filter(is_system=True).exists(), "org already has system groups" org.groups.create( - name="Active", + name="\\Active", # to avoid name collisions with real groups group_type=ContactGroup.TYPE_DB_ACTIVE, is_system=True, status=cls.STATUS_READY, @@ -1434,7 +1443,7 @@ def create_system_groups(cls, org): modified_by=org.modified_by, ) org.groups.create( - name="Blocked", + name="\\Blocked", group_type=ContactGroup.TYPE_DB_BLOCKED, is_system=True, status=cls.STATUS_READY, @@ -1442,7 +1451,7 @@ def create_system_groups(cls, org): modified_by=org.modified_by, ) org.groups.create( - name="Stopped", + name="\\Stopped", group_type=ContactGroup.TYPE_DB_STOPPED, is_system=True, status=cls.STATUS_READY, @@ -1450,7 +1459,7 @@ def create_system_groups(cls, org): modified_by=org.modified_by, ) org.groups.create( - name="Archived", + name="\\Archived", group_type=ContactGroup.TYPE_DB_ARCHIVED, is_system=True, status=cls.STATUS_READY, @@ -1587,11 +1596,20 @@ def update_query(self, query, reevaluate=True, parsed=None): if reevaluate: on_transaction_commit(lambda: queue_populate_dynamic_group(self)) + @classmethod + def get_member_counts(cls, groups) -> dict: + """ + Gets contact counts for the given groups + """ + counts = ContactGroupCount.objects.filter(group__in=groups).values("group_id").annotate(count_sum=Sum("count")) + by_group_id = {c["group_id"]: c["count_sum"] for c in counts} + return {g: by_group_id.get(g.id, 0) for g in groups} + def get_member_count(self): """ Returns the number of contacts in the group """ - return ContactGroupCount.get_totals([self])[self] + return ContactGroup.get_member_counts([self])[self] def get_dependents(self): dependents = super().get_dependents() @@ -1640,7 +1658,7 @@ def _full_release(self): ContactGroupContacts = self.contacts.through memberships = ContactGroupContacts.objects.filter(contactgroup_id=self.id) - for batch in chunk_list(memberships, 100): + for batch in itertools.batched(memberships, 100): ContactGroupContacts.objects.filter(id__in=[m.id for m in batch]).delete() Contact.objects.filter(id__in=[m.contact_id for m in batch]).update(modified_on=timezone.now()) @@ -1700,7 +1718,7 @@ class ContactNote(models.Model): created_by = models.ForeignKey(User, on_delete=models.PROTECT, related_name="contact_notes") -class ContactGroupCount(SquashableModel): +class ContactGroupCount(BaseSquashableCount): """ Maintains counts of contact groups. These are calculated via triggers on the database and squashed by a recurring task. 
@@ -1709,42 +1727,6 @@ class ContactGroupCount(SquashableModel): squash_over = ("group_id",) group = models.ForeignKey(ContactGroup, on_delete=models.PROTECT, related_name="counts", db_index=True) - count = models.IntegerField(default=0) - - @classmethod - def get_squash_query(cls, distinct_set): - sql = """ - WITH deleted as ( - DELETE FROM %(table)s WHERE "group_id" = %%s RETURNING "count" - ) - INSERT INTO %(table)s("group_id", "count", "is_squashed") - VALUES (%%s, GREATEST(0, (SELECT SUM("count") FROM deleted)), TRUE); - """ % { - "table": cls._meta.db_table - } - - return sql, (distinct_set.group_id,) * 2 - - @classmethod - def get_totals(cls, groups) -> dict: - """ - Gets total counts for all the given groups - """ - counts = cls.objects.filter(group__in=groups) - counts = counts.values("group").order_by("group").annotate(count_sum=Sum("count")) - counts_by_group_id = {c["group"]: c["count_sum"] for c in counts} - return {g: counts_by_group_id.get(g.id, 0) for g in groups} - - @classmethod - def populate_for_group(cls, group): - # remove old ones - ContactGroupCount.objects.filter(group=group).delete() - - # calculate our count for the group - count = group.contacts.all().count() - - # insert updated count, returning it - return ContactGroupCount.objects.create(group=group, count=count) class Meta: indexes = [ @@ -1868,7 +1850,7 @@ def write(self, export): num_records = 0 # write out contacts in batches to limit memory usage - for batch_ids in chunk_list(contact_ids, 1000): + for batch_ids in itertools.batched(contact_ids, 1000): # fetch all the contacts for our batch batch_contacts = ( Contact.objects.filter(id__in=batch_ids).prefetch_related("org", "groups").using("readonly") @@ -1985,7 +1967,10 @@ def try_to_parse(cls, org: Org, file, filename: str) -> tuple[list, int]: total number of records. Otherwise raises a ValidationError. """ - workbook = load_workbook(filename=file, read_only=True) + try: + workbook = load_workbook(filename=file, read_only=True, data_only=True) + except Exception: + raise ValidationError(_("Import file appears to be corrupted.")) ws = workbook.active # see https://openpyxl.readthedocs.io/en/latest/optimized.html#worksheet-dimensions but even with this we need @@ -2203,8 +2188,9 @@ def start(self): self.save(update_fields=("group",)) # parse each row, creating batch tasks for mailroom - workbook = load_workbook(filename=self.file, read_only=True) + workbook = load_workbook(filename=self.file, read_only=True, data_only=True) ws = workbook.active + ws.reset_dimensions() # see https://openpyxl.readthedocs.io/en/latest/optimized.html#worksheet-dimensions data = ws.iter_rows(min_row=2) urns = [] diff --git a/temba/contacts/omnibox.py b/temba/contacts/omnibox.py index 79265ce0e69..87dd6b70cba 100644 --- a/temba/contacts/omnibox.py +++ b/temba/contacts/omnibox.py @@ -5,7 +5,7 @@ from temba import mailroom from temba.utils.models.es import IDSliceQuerySet -from .models import Contact, ContactGroup, ContactGroupCount +from .models import Contact, ContactGroup SEARCH_ALL_GROUPS = "g" SEARCH_STATIC_GROUPS = "s" @@ -90,7 +90,7 @@ def omnibox_serialize(org, groups, contacts, *, encode=False): Serializes lists of groups and contacts into the combined list format expected by the omnibox. 
""" - group_counts = ContactGroupCount.get_totals(groups) if groups else {} + group_counts = ContactGroup.get_member_counts(groups) if groups else {} results = [] for group in groups: diff --git a/temba/contacts/tasks.py b/temba/contacts/tasks.py index 9238dd6cd56..65b8e830096 100644 --- a/temba/contacts/tasks.py +++ b/temba/contacts/tasks.py @@ -1,10 +1,9 @@ +import itertools import logging from celery import shared_task -from django.contrib.auth.models import User - -from temba.utils import chunk_list +from temba.orgs.models import User from temba.utils.crons import cron_task from .models import Contact, ContactGroup, ContactGroupCount, ContactImport @@ -19,7 +18,7 @@ def release_contacts(user_id, contact_ids): """ user = User.objects.get(pk=user_id) - for id_batch in chunk_list(contact_ids, 100): + for id_batch in itertools.batched(contact_ids, 100): batch = Contact.objects.filter(id__in=id_batch, is_active=True).prefetch_related("urns") for contact in batch: contact.release(user) diff --git a/temba/contacts/templatetags/contacts.py b/temba/contacts/templatetags/contacts.py index 183ab999a45..a469bcbd5f8 100644 --- a/temba/contacts/templatetags/contacts.py +++ b/temba/contacts/templatetags/contacts.py @@ -24,16 +24,14 @@ @register.simple_tag() -def contact_field(contact, key): - field = contact.org.fields.filter(is_active=True, key=key).first() - if field is None: - return MISSING_VALUE - +def contact_field(contact, field): value = contact.get_field_display(field) + if value and field.value_type == ContactField.TYPE_DATETIME: value = contact.get_field_value(field) if value: - return mark_safe(f"") + display = "timedate" if field.is_proxy else "date" + return mark_safe(f"") return value or MISSING_VALUE diff --git a/temba/contacts/templatetags/tests.py b/temba/contacts/templatetags/tests.py deleted file mode 100644 index 235cb1f4e8a..00000000000 --- a/temba/contacts/templatetags/tests.py +++ /dev/null @@ -1,49 +0,0 @@ -from temba.tests import TembaTest - -from .contacts import format_urn, name_or_urn, urn_icon, urn_or_anon - - -class ContactsTest(TembaTest): - def test_name_or_urn(self): - contact1 = self.create_contact("", urns=[]) - contact2 = self.create_contact("Ann", urns=[]) - contact3 = self.create_contact("Bob", urns=["tel:+12024561111", "telegram:098761111"]) - contact4 = self.create_contact("", urns=["tel:+12024562222", "telegram:098762222"]) - - self.assertEqual("", name_or_urn(contact1, self.org)) - self.assertEqual("Ann", name_or_urn(contact2, self.org)) - self.assertEqual("Bob", name_or_urn(contact3, self.org)) - self.assertEqual("(202) 456-2222", name_or_urn(contact4, self.org)) - - with self.anonymous(self.org): - self.assertEqual(f"{contact1.id:010}", name_or_urn(contact1, self.org)) - self.assertEqual("Ann", name_or_urn(contact2, self.org)) - self.assertEqual("Bob", name_or_urn(contact3, self.org)) - self.assertEqual(f"{contact4.id:010}", name_or_urn(contact4, self.org)) - - def test_urn_or_anon(self): - contact1 = self.create_contact("Bob", urns=[]) - contact2 = self.create_contact("Uri", urns=["tel:+12024561414", "telegram:098765432"]) - - self.assertEqual("--", urn_or_anon(contact1, self.org)) - self.assertEqual("+1 202-456-1414", urn_or_anon(contact2, self.org)) - - with self.anonymous(self.org): - self.assertEqual(f"{contact1.id:010}", urn_or_anon(contact1, self.org)) - self.assertEqual(f"{contact2.id:010}", urn_or_anon(contact2, self.org)) - - def test_urn_icon(self): - contact = self.create_contact("Uri", urns=["tel:+1234567890", "telegram:098765432", 
"viber:346376373"]) - tel_urn, tg_urn, viber_urn = contact.urns.order_by("-priority") - - self.assertEqual("icon-phone", urn_icon(tel_urn)) - self.assertEqual("icon-telegram", urn_icon(tg_urn)) - self.assertEqual("", urn_icon(viber_urn)) - - def test_format_urn(self): - contact = self.create_contact("Uri", urns=["tel:+12024561414"]) - - self.assertEqual("+1 202-456-1414", format_urn(contact.get_urn(), self.org)) - - with self.anonymous(self.org): - self.assertEqual("••••••••", format_urn(contact.get_urn(), self.org)) diff --git a/temba/contacts/tests.py b/temba/contacts/tests.py deleted file mode 100644 index ec46dd1c8e4..00000000000 --- a/temba/contacts/tests.py +++ /dev/null @@ -1,5268 +0,0 @@ -import io -import tempfile -from datetime import date, datetime, timedelta, timezone as tzone -from decimal import Decimal -from unittest.mock import call, patch -from uuid import UUID -from zoneinfo import ZoneInfo - -import iso8601 -from openpyxl import load_workbook - -from django.core.files.storage import default_storage -from django.core.validators import ValidationError -from django.db.models import Value as DbValue -from django.db.models.functions import Concat, Substr -from django.db.utils import IntegrityError -from django.test.utils import override_settings -from django.urls import reverse -from django.utils import timezone - -from temba import mailroom -from temba.airtime.models import AirtimeTransfer -from temba.campaigns.models import Campaign, CampaignEvent, EventFire -from temba.channels.models import ChannelEvent -from temba.flows.models import Flow, FlowSession, FlowStart -from temba.ivr.models import Call -from temba.locations.models import AdminBoundary -from temba.mailroom import modifiers -from temba.msgs.models import Msg, SystemLabel -from temba.orgs.models import Export, Org, OrgRole -from temba.schedules.models import Schedule -from temba.tests import CRUDLTestMixin, MigrationTest, MockResponse, TembaTest, matchers, mock_mailroom -from temba.tests.engine import MockSessionWriter -from temba.tickets.models import Ticket, TicketCount, Topic -from temba.triggers.models import Trigger -from temba.utils import json, s3 -from temba.utils.dates import datetime_to_timestamp -from temba.utils.views import TEMBA_MENU_SELECTION - -from .models import ( - URN, - Contact, - ContactExport, - ContactField, - ContactGroup, - ContactGroupCount, - ContactImport, - ContactImportBatch, - ContactURN, -) -from .tasks import squash_group_counts -from .templatetags.contacts import contact_field, msg_status_badge - - -class ContactCRUDLTest(CRUDLTestMixin, TembaTest): - def setUp(self): - super().setUp() - - self.country = AdminBoundary.create(osm_id="171496", name="Rwanda", level=0) - AdminBoundary.create(osm_id="1708283", name="Kigali", level=1, parent=self.country) - - self.create_field("age", "Age", value_type="N") - self.create_field("home", "Home", value_type="S", priority=10) - - # sample flows don't actually get created by org initialization during tests because there are no users at that - # point so create them explicitly here, so that we also get the sample groups - self.org.create_sample_flows("https://api.rapidpro.io") - - def test_menu(self): - menu_url = reverse("contacts.contact_menu") - - self.assertRequestDisallowed(menu_url, [None, self.agent]) - self.assertPageMenu( - menu_url, - self.admin, - [ - "Active (0)", - "Archived (0)", - "Blocked (0)", - "Stopped (0)", - "Import", - "Fields (2)", - ("Groups", ["Open Tickets (0)", "Survey Audience (0)", "Unsatisfied Customers (0)"]), - 
], - ) - - @mock_mailroom - def test_create(self, mr_mocks): - create_url = reverse("contacts.contact_create") - - self.assertRequestDisallowed(create_url, [None, self.agent, self.user]) - self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=("name", "phone")) - - # simulate validation failing because phone number taken - mr_mocks.contact_urns({"tel:+250781111111": 12345678}) - - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Joe", "phone": "+250781111111"}, - form_errors={"phone": "In use by another contact."}, - ) - - # simulate validation failing because phone number isn't E164 - mr_mocks.contact_urns({"tel:+250781111111": False}) - - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Joe", "phone": "+250781111111"}, - form_errors={"phone": "Ensure number includes country code."}, - ) - - # simulate validation failing because phone number isn't valid - mr_mocks.contact_urns({"tel:xx": "URN 0 invalid"}) - - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Joe", "phone": "xx"}, - form_errors={"phone": "Invalid phone number."}, - ) - - # try valid number - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Joe", "phone": "+250782222222"}, - new_obj_query=Contact.objects.filter(org=self.org, name="Joe", urns__identity="tel:+250782222222"), - success_status=200, - ) - - @mock_mailroom - def test_list(self, mr_mocks): - self.login(self.user) - list_url = reverse("contacts.contact_list") - - joe = self.create_contact("Joe", phone="123", fields={"age": "20", "home": "Kigali"}) - frank = self.create_contact("Frank", phone="124", fields={"age": "18"}) - - mr_mocks.contact_search('name != ""', contacts=[]) - self.create_group("No Name", query='name = ""') - - with self.assertNumQueries(15): - response = self.client.get(list_url) - - self.assertEqual([frank, joe], list(response.context["object_list"])) - self.assertIsNone(response.context["search_error"]) - self.assertEqual([], list(response.context["actions"])) - self.assertContentMenu(list_url, self.user, ["Export"]) - - active_contacts = self.org.active_contacts_group - - # fetch with spa flag - response = self.client.get(list_url, content_type="application/json", HTTP_TEMBA_SPA="1") - self.assertEqual(response.context["base_template"], "spa.html") - - mr_mocks.contact_search("age = 18", contacts=[frank]) - - response = self.client.get(list_url + "?search=age+%3D+18") - self.assertEqual(list(response.context["object_list"]), [frank]) - self.assertEqual(response.context["search"], "age = 18") - self.assertEqual(response.context["save_dynamic_search"], True) - self.assertIsNone(response.context["search_error"]) - self.assertEqual(list(response.context["contact_fields"].values_list("name", flat=True)), ["Home", "Age"]) - - mr_mocks.contact_search("age = 18", contacts=[frank], total=10020) - - # we return up to 10000 contacts when searching with ES, so last page is 200 - url = f'{reverse("contacts.contact_list")}?{"search=age+%3D+18&page=200"}' - response = self.client.get(url) - - self.assertEqual(response.status_code, 200) - - # when user requests page 201, we return a 404, page not found - url = f'{reverse("contacts.contact_list")}?{"search=age+%3D+18&page=201"}' - response = self.client.get(url) - - self.assertEqual(response.status_code, 404) - - mr_mocks.contact_search('age > 18 and home = "Kigali"', cleaned='age > 18 AND home = "Kigali"', contacts=[joe]) - - response = self.client.get(list_url + '?search=age+>+18+and+home+%3D+"Kigali"') - 
self.assertEqual(list(response.context["object_list"]), [joe]) - self.assertEqual(response.context["search"], 'age > 18 AND home = "Kigali"') - self.assertEqual(response.context["save_dynamic_search"], True) - self.assertIsNone(response.context["search_error"]) - - mr_mocks.contact_search("Joe", cleaned='name ~ "Joe"', contacts=[joe]) - - response = self.client.get(list_url + "?search=Joe") - self.assertEqual(list(response.context["object_list"]), [joe]) - self.assertEqual(response.context["search"], 'name ~ "Joe"') - self.assertEqual(response.context["save_dynamic_search"], True) - self.assertIsNone(response.context["search_error"]) - - with self.anonymous(self.org): - mr_mocks.contact_search(f"{joe.id}", cleaned=f"id = {joe.id}", contacts=[joe]) - - response = self.client.get(list_url + f"?search={joe.id}") - self.assertEqual(list(response.context["object_list"]), [joe]) - self.assertIsNone(response.context["search_error"]) - self.assertEqual(response.context["search"], f"id = {joe.id}") - self.assertEqual(response.context["save_dynamic_search"], False) - - # try with invalid search string - mr_mocks.exception(mailroom.QueryValidationException("mismatched input at (((", "syntax")) - - response = self.client.get(list_url + "?search=(((") - self.assertEqual(list(response.context["object_list"]), []) - self.assertEqual(response.context["search_error"], "Invalid query syntax.") - self.assertContains(response, "Invalid query syntax.") - - self.login(self.admin) - - # admins can see bulk actions - age_query = "?search=age%20%3E%2050" - response = self.client.get(list_url) - self.assertEqual([frank, joe], list(response.context["object_list"])) - self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) - - self.assertContentMenu( - list_url, - self.admin, - ["New Contact", "New Group", "Export"], - ) - self.assertContentMenu( - list_url + age_query, - self.admin, - ["Create Smart Group", "New Contact", "New Group", "Export"], - ) - - # TODO: group labeling as a feature is on probation - # self.client.post(list_url, {"action": "label", "objects": frank.id, "label": survey_audience.id}) - # self.assertIn(frank, survey_audience.contacts.all()) - - # try label bulk action against search results - # self.client.post(list_url + "?search=Joe", {"action": "label", "objects": joe.id, "label": survey_audience.id}) - # self.assertIn(joe, survey_audience.contacts.all()) - - # self.assertEqual( - # call(self.org.id, group_uuid=str(active_contacts.uuid), query="Joe", sort="", offset=0, exclude_ids=[]), - # mr_mocks.calls["contact_search"][-1], - # ) - - # try archive bulk action - self.client.post(list_url + "?search=Joe", {"action": "archive", "objects": joe.id}) - - # we re-run the search for the response, but exclude Joe - self.assertEqual( - call(self.org, active_contacts, "Joe", sort="", offset=0, exclude_ids=[joe.id]), - mr_mocks.calls["contact_search"][-1], - ) - - response = self.client.get(list_url) - self.assertEqual([frank], list(response.context["object_list"])) - - joe.refresh_from_db() - self.assertEqual(Contact.STATUS_ARCHIVED, joe.status) - - @mock_mailroom - def test_blocked(self, mr_mocks): - joe = self.create_contact("Joe", urns=["twitter:joe"]) - frank = self.create_contact("Frank", urns=["twitter:frank"]) - billy = self.create_contact("Billy", urns=["twitter:billy"]) - self.create_contact("Mary", urns=["twitter:mary"]) - - joe.block(self.admin) - frank.block(self.admin) - billy.block(self.admin) - - self.login(self.user) - - blocked_url = 
reverse("contacts.contact_blocked") - - self.assertRequestDisallowed(blocked_url, [None, self.agent]) - response = self.assertListFetch(blocked_url, [self.editor, self.admin], context_objects=[billy, frank, joe]) - self.assertEqual(["restore", "archive"], list(response.context["actions"])) - self.assertContentMenu(blocked_url, self.admin, ["Export"]) - - # try restore bulk action - self.client.post(blocked_url, {"action": "restore", "objects": billy.id}) - - response = self.client.get(blocked_url) - self.assertEqual([frank, joe], list(response.context["object_list"])) - - billy.refresh_from_db() - self.assertEqual(Contact.STATUS_ACTIVE, billy.status) - - # try archive bulk action - self.client.post(blocked_url, {"action": "archive", "objects": frank.id}) - - response = self.client.get(blocked_url) - self.assertEqual([joe], list(response.context["object_list"])) - - frank.refresh_from_db() - self.assertEqual(Contact.STATUS_ARCHIVED, frank.status) - - @mock_mailroom - def test_stopped(self, mr_mocks): - joe = self.create_contact("Joe", urns=["twitter:joe"]) - frank = self.create_contact("Frank", urns=["twitter:frank"]) - billy = self.create_contact("Billy", urns=["twitter:billy"]) - self.create_contact("Mary", urns=["twitter:mary"]) - - joe.stop(self.admin) - frank.stop(self.admin) - billy.stop(self.admin) - - self.login(self.user) - - stopped_url = reverse("contacts.contact_stopped") - - self.assertRequestDisallowed(stopped_url, [None, self.agent]) - response = self.assertListFetch( - stopped_url, [self.user, self.editor, self.admin], context_objects=[billy, frank, joe] - ) - self.assertEqual(["restore", "archive"], list(response.context["actions"])) - self.assertContentMenu(stopped_url, self.admin, ["Export"]) - - # try restore bulk action - self.client.post(stopped_url, {"action": "restore", "objects": billy.id}) - - response = self.client.get(stopped_url) - self.assertEqual([frank, joe], list(response.context["object_list"])) - - billy.refresh_from_db() - self.assertEqual(Contact.STATUS_ACTIVE, billy.status) - - # try archive bulk action - self.client.post(stopped_url, {"action": "archive", "objects": frank.id}) - - response = self.client.get(stopped_url) - self.assertEqual([joe], list(response.context["object_list"])) - - frank.refresh_from_db() - self.assertEqual(Contact.STATUS_ARCHIVED, frank.status) - - @patch("temba.contacts.models.Contact.BULK_RELEASE_IMMEDIATELY_LIMIT", 5) - @mock_mailroom - def test_archived(self, mr_mocks): - joe = self.create_contact("Joe", urns=["twitter:joe"]) - frank = self.create_contact("Frank", urns=["twitter:frank"]) - billy = self.create_contact("Billy", urns=["twitter:billy"]) - self.create_contact("Mary", urns=["twitter:mary"]) - - joe.archive(self.admin) - frank.archive(self.admin) - billy.archive(self.admin) - - self.login(self.user) - - archived_url = reverse("contacts.contact_archived") - - self.assertRequestDisallowed(archived_url, [None, self.agent]) - response = self.assertListFetch( - archived_url, [self.user, self.editor, self.admin], context_objects=[billy, frank, joe] - ) - self.assertEqual(["restore", "delete"], list(response.context["actions"])) - self.assertContentMenu(archived_url, self.admin, ["Export", "Delete All"]) - - # try restore bulk action - self.client.post(archived_url, {"action": "restore", "objects": billy.id}) - - response = self.client.get(archived_url) - self.assertEqual([frank, joe], list(response.context["object_list"])) - - billy.refresh_from_db() - self.assertEqual(Contact.STATUS_ACTIVE, billy.status) - - # try 
delete bulk action - self.client.post(archived_url, {"action": "delete", "objects": frank.id}) - - response = self.client.get(archived_url) - self.assertEqual([joe], list(response.context["object_list"])) - - frank.refresh_from_db() - self.assertFalse(frank.is_active) - - # the archived view also supports deleting all - self.client.post(archived_url, {"action": "delete", "all": "true"}) - - response = self.client.get(archived_url) - self.assertEqual([], list(response.context["object_list"])) - - # only archived contacts affected - self.assertEqual(2, Contact.objects.filter(is_active=False, status=Contact.STATUS_ARCHIVED).count()) - self.assertEqual(2, Contact.objects.filter(is_active=False).count()) - - # for larger numbers of contacts, a background task is used - for c in range(6): - contact = self.create_contact(f"Bob{c}", urns=[f"twitter:bob{c}"]) - contact.archive(self.user) - - response = self.client.get(archived_url) - self.assertEqual(6, len(response.context["object_list"])) - - self.client.post(archived_url, {"action": "delete", "all": "true"}) - - response = self.client.get(archived_url) - self.assertEqual(0, len(response.context["object_list"])) - - @mock_mailroom - def test_filter(self, mr_mocks): - open_tickets = self.org.groups.get(name="Open Tickets") - joe = self.create_contact("Joe", phone="123") - frank = self.create_contact("Frank", phone="124") - self.create_contact("Bob", phone="125") - - mr_mocks.contact_search("age > 40", contacts=[frank], total=1) - - group1 = self.create_group("Testers", contacts=[joe, frank]) # static group - group2 = self.create_group("Oldies", query="age > 40") # smart group - group2.contacts.add(frank) - group3 = self.create_group("Other Org", org=self.org2) - - group1_url = reverse("contacts.contact_filter", args=[group1.uuid]) - group2_url = reverse("contacts.contact_filter", args=[group2.uuid]) - group3_url = reverse("contacts.contact_filter", args=[group3.uuid]) - open_tickets_url = reverse("contacts.contact_filter", args=[open_tickets.uuid]) - - self.assertRequestDisallowed(group1_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(group1_url, [self.user, self.editor, self.admin]) - - self.assertEqual([frank, joe], list(response.context["object_list"])) - self.assertEqual(["block", "unlabel", "send", "start-flow"], list(response.context["actions"])) - - self.assertContentMenu( - group1_url, - self.admin, - ["Edit", "Export", "Usages", "Delete"], - ) - - response = self.assertReadFetch(group2_url, [self.editor]) - - self.assertEqual([frank], list(response.context["object_list"])) - self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) - self.assertContains(response, "age > 40") - - # can access system group like any other except no options to edit or delete - response = self.assertReadFetch(open_tickets_url, [self.editor]) - self.assertEqual([], list(response.context["object_list"])) - self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) - self.assertContains(response, "tickets > 0") - self.assertContentMenu(open_tickets_url, self.admin, ["Export", "Usages"]) - - # if a user tries to access a non-existent group, that's a 404 - response = self.requestView(reverse("contacts.contact_filter", args=["21343253"]), self.admin) - self.assertEqual(404, response.status_code) - - # if a user tries to access a group in another org, send them to the login page - response = self.requestView(group3_url, self.admin) - self.assertLoginRedirect(response) - 
- # if the user has access to that org, we redirect to the org choose page - self.org2.add_user(self.admin, OrgRole.ADMINISTRATOR) - response = self.requestView(group3_url, self.admin) - self.assertRedirect(response, "/org/choose/") - - @mock_mailroom - def test_read(self, mr_mocks): - joe = self.create_contact("Joe", phone="123") - - read_url = reverse("contacts.contact_read", args=[joe.uuid]) - - response = self.client.get(read_url) - self.assertLoginRedirect(response) - - self.assertContentMenu(read_url, self.user, []) - self.assertContentMenu(read_url, self.editor, ["Edit", "Start Flow", "Open Ticket"]) - self.assertContentMenu(read_url, self.admin, ["Edit", "Start Flow", "Open Ticket"]) - - # if there's an open ticket already, don't show open ticket option - self.create_ticket(joe) - self.assertContentMenu(read_url, self.editor, ["Edit", "Start Flow"]) - - # login as viewer - self.login(self.user) - - response = self.client.get(read_url) - self.assertContains(response, "Joe") - - # login as admin - self.login(self.admin) - - response = self.client.get(read_url) - self.assertContains(response, "Joe") - self.assertEqual("/contact/active", response.headers[TEMBA_MENU_SELECTION]) - - # block the contact - joe.block(self.admin) - self.assertTrue(Contact.objects.get(pk=joe.id, status="B")) - - self.assertContentMenu(read_url, self.admin, ["Edit"]) - - response = self.client.get(read_url) - self.assertContains(response, "Joe") - self.assertEqual("/contact/blocked", response.headers[TEMBA_MENU_SELECTION]) - - # can't access a deleted contact - joe.release(self.admin) - - response = self.client.get(read_url) - self.assertEqual(response.status_code, 404) - - # contact with only a urn - nameless = self.create_contact("", urns=["twitter:bobby_anon"]) - response = self.client.get(reverse("contacts.contact_read", args=[nameless.uuid])) - self.assertContains(response, "bobby_anon") - - # contact without name or urn - nameless = Contact.objects.create(org=self.org) - response = self.client.get(reverse("contacts.contact_read", args=[nameless.uuid])) - self.assertContains(response, "Contact Details") - - # invalid uuid should return 404 - response = self.client.get(reverse("contacts.contact_read", args=["invalid-uuid"])) - self.assertEqual(response.status_code, 404) - - @mock_mailroom - def test_update(self, mr_mocks): - self.org.flow_languages = ["eng", "spa"] - self.org.save(update_fields=("flow_languages",)) - - self.create_field("gender", "Gender", value_type=ContactField.TYPE_TEXT) - contact = self.create_contact( - "Bob", - urns=["tel:+593979111111", "tel:+593979222222", "telegram:5474754"], - fields={"age": 41, "gender": "M"}, - language="eng", - ) - testers = self.create_group("Testers", contacts=[contact]) - self.create_contact("Ann", urns=["tel:+593979444444"]) - - update_url = reverse("contacts.contact_update", args=[contact.id]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={ - "name": "Bob", - "status": "A", - "language": "eng", - "groups": [testers], - "new_scheme": None, - "new_path": None, - "urn__tel__0": "+593979111111", - "urn__tel__1": "+593979222222", - "urn__telegram__2": "5474754", - }, - ) - - # try to take URN in use by another contact - mr_mocks.contact_urns({"tel:+593979444444": 12345678}) - - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Bobby", "status": "B", "language": "spa", "groups": [testers.id], "urn__tel__0": 
"+593979444444"}, - form_errors={"urn__tel__0": "In use by another contact."}, - object_unchanged=contact, - ) - - # try to update to an invalid URN - mr_mocks.contact_urns({"tel:++++": "invalid path component"}) - - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Bobby", "status": "B", "language": "spa", "groups": [testers.id], "urn__tel__0": "++++"}, - form_errors={"urn__tel__0": "Invalid format."}, - object_unchanged=contact, - ) - - # try to add a new invalid phone URN - mr_mocks.contact_urns({"tel:123": "not a valid phone number"}) - - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "Bobby", - "status": "B", - "language": "spa", - "groups": [testers.id], - "urn__tel__0": "+593979111111", - "new_scheme": "tel", - "new_path": "123", - }, - form_errors={"new_path": "Invalid format."}, - object_unchanged=contact, - ) - - # try to add a new phone URN that isn't E164 - mr_mocks.contact_urns({"tel:123": False}) - - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "Bobby", - "status": "B", - "language": "spa", - "groups": [testers.id], - "urn__tel__0": "+593979111111", - "new_scheme": "tel", - "new_path": "123", - }, - form_errors={"new_path": "Invalid phone number. Ensure number includes country code."}, - object_unchanged=contact, - ) - - # update all fields (removes second tel URN, adds a new Facebook URN) - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "Bobby", - "status": "B", - "language": "spa", - "groups": [testers.id], - "urn__tel__0": "+593979333333", - "urn__telegram__2": "78686776", - "new_scheme": "facebook", - "new_path": "9898989", - }, - success_status=200, - ) - - contact.refresh_from_db() - self.assertEqual("Bobby", contact.name) - self.assertEqual(Contact.STATUS_BLOCKED, contact.status) - self.assertEqual("spa", contact.language) - self.assertEqual({testers}, set(contact.get_groups())) - self.assertEqual( - ["tel:+593979333333", "telegram:78686776", "facebook:9898989"], - [u.identity for u in contact.urns.order_by("-priority")], - ) - - # for non-active contacts, shouldn't see groups on form - self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={ - "name": "Bobby", - "status": "B", - "language": "spa", - "new_scheme": None, - "new_path": None, - "urn__tel__0": "+593979333333", - "urn__telegram__1": "78686776", - "urn__facebook__2": "9898989", - }, - ) - - # try to update with invalid URNs - mr_mocks.contact_urns({"tel:456": "invalid path component", "facebook:xxxxx": "invalid path component"}) - - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "Bobby", - "status": "B", - "language": "spa", - "groups": [], - "urn__tel__0": "456", - "urn__facebook__2": "xxxxx", - }, - form_errors={ - "urn__tel__0": "Invalid format.", - "urn__facebook__2": "Invalid format.", - }, - object_unchanged=contact, - ) - - # if contact has a language which is no longer a flow language, it should still be a valid option on the form - contact.language = "kin" - contact.save(update_fields=("language",)) - - response = self.assertUpdateFetch( - update_url, - [self.admin], - form_fields={ - "name": "Bobby", - "status": "B", - "language": "kin", - "new_scheme": None, - "new_path": None, - "urn__tel__0": "+593979333333", - "urn__telegram__1": "78686776", - "urn__facebook__2": "9898989", - }, - ) - self.assertContains(response, "Kinyarwanda") - - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "Bobby", - "status": "A", - "language": "kin", - "urn__tel__0": 
"+593979333333", - "urn__telegram__1": "78686776", - "urn__facebook__2": "9898989", - }, - success_status=200, - ) - - contact.refresh_from_db() - self.assertEqual("Bobby", contact.name) - self.assertEqual(Contact.STATUS_ACTIVE, contact.status) - self.assertEqual("kin", contact.language) - - @mock_mailroom - def test_update_with_mailroom_error(self, mr_mocks): - mr_mocks.exception(mailroom.RequestException("", "", MockResponse(400, '{"error": "Error updating contact"}'))) - - contact = self.create_contact("Joe", phone="1234") - - self.login(self.admin) - - response = self.client.post( - reverse("contacts.contact_update", args=[contact.id]), - {"name": "Joe", "status": Contact.STATUS_ACTIVE, "language": "eng"}, - ) - - self.assertFormError( - response.context["form"], None, "An error occurred updating your contact. Please try again later." - ) - - @mock_mailroom - def test_export(self, mr_mocks): - export_url = reverse("contacts.contact_export") - - self.assertRequestDisallowed(export_url, [None, self.agent]) - response = self.assertUpdateFetch(export_url, [self.editor, self.admin], form_fields=("with_groups",)) - self.assertNotContains(response, "already an export in progress") - - # create a dummy export task so that we won't be able to export - blocking_export = ContactExport.create(self.org, self.admin) - - response = self.client.get(export_url) - self.assertContains(response, "already an export in progress") - - # check we can't submit in case a user opens the form and whilst another user is starting an export - response = self.client.post(export_url, {}) - self.assertContains(response, "already an export in progress") - self.assertEqual(1, Export.objects.count()) - - # mark that one as finished so it's no longer a blocker - blocking_export.status = Export.STATUS_COMPLETE - blocking_export.save(update_fields=("status",)) - - # try to export a group that is too big - big_group = self.create_group("Big Group", contacts=[]) - mr_mocks.contact_export_preview(1_000_123) - - response = self.client.get(export_url + f"?g={big_group.uuid}") - self.assertContains(response, "This group or search is too large to export.") - - response = self.client.post( - export_url + f"?g={self.org.active_contacts_group.uuid}", {"with_groups": [big_group.id]} - ) - self.assertEqual(200, response.status_code) - - export = Export.objects.exclude(id=blocking_export.id).get() - self.assertEqual("contact", export.export_type) - self.assertEqual( - {"group_id": self.org.active_contacts_group.id, "search": None, "with_groups": [big_group.id]}, - export.config, - ) - - def test_scheduled(self): - contact1 = self.create_contact("Joe", phone="+1234567890") - contact2 = self.create_contact("Frank", phone="+1204567802") - farmers = self.create_group("Farmers", contacts=[contact1, contact2]) - - schedule_url = reverse("contacts.contact_scheduled", args=[contact1.uuid]) - - self.assertRequestDisallowed(schedule_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(schedule_url, [self.user, self.editor, self.admin]) - self.assertEqual({"results": []}, response.json()) - - # create a campaign and event fires for this contact - campaign = Campaign.create(self.org, self.admin, "Reminders", farmers) - joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) - event2_flow = self.create_flow("Reminder Flow") - event1 = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") - event2 = CampaignEvent.create_flow_event(self.org, self.admin, 
campaign, joined, 2, unit="D", flow=event2_flow) - fire1 = EventFire.objects.create(event=event1, contact=contact1, scheduled=timezone.now() + timedelta(days=2)) - fire2 = EventFire.objects.create(event=event2, contact=contact1, scheduled=timezone.now() + timedelta(days=5)) - - # create scheduled and regular broadcasts which send to both groups - bcast1 = self.create_broadcast( - self.admin, - {"eng": {"text": "Hi again"}}, - contacts=[contact1, contact2], - schedule=Schedule.create(self.org, timezone.now() + timedelta(days=3), Schedule.REPEAT_DAILY), - ) - self.create_broadcast(self.admin, {"eng": {"text": "Bye"}}, contacts=[contact1, contact2]) # not scheduled - - # create scheduled trigger which this contact is explicitly added to - trigger1_flow = self.create_flow("Favorites 1") - trigger1 = Trigger.create( - self.org, - self.admin, - trigger_type=Trigger.TYPE_SCHEDULE, - flow=trigger1_flow, - schedule=Schedule.create(self.org, timezone.now() + timedelta(days=4), Schedule.REPEAT_WEEKLY), - ) - trigger1.contacts.add(contact1, contact2) - - # create scheduled trigger which this contact is added to via a group - trigger2_flow = self.create_flow("Favorites 2") - trigger2 = Trigger.create( - self.org, - self.admin, - trigger_type=Trigger.TYPE_SCHEDULE, - flow=trigger2_flow, - schedule=Schedule.create(self.org, timezone.now() + timedelta(days=6), Schedule.REPEAT_MONTHLY), - ) - trigger2.groups.add(farmers) - - # create scheduled trigger which this contact is explicitly added to... but also excluded from - trigger3 = Trigger.create( - self.org, - self.admin, - trigger_type=Trigger.TYPE_SCHEDULE, - flow=self.create_flow("Favorites 3"), - schedule=Schedule.create(self.org, timezone.now() + timedelta(days=4), Schedule.REPEAT_WEEKLY), - ) - trigger3.contacts.add(contact1, contact2) - trigger3.exclude_groups.add(farmers) - - response = self.requestView(schedule_url, self.admin) - self.assertEqual( - { - "results": [ - { - "type": "campaign_event", - "scheduled": fire1.scheduled.isoformat(), - "repeat_period": None, - "campaign": {"uuid": str(campaign.uuid), "name": "Reminders"}, - "message": "Hi", - }, - { - "type": "scheduled_broadcast", - "scheduled": bcast1.schedule.next_fire.astimezone(tzone.utc).isoformat(), - "repeat_period": "D", - "message": "Hi again", - }, - { - "type": "scheduled_trigger", - "scheduled": trigger1.schedule.next_fire.astimezone(tzone.utc).isoformat(), - "repeat_period": "W", - "flow": {"uuid": str(trigger1_flow.uuid), "name": "Favorites 1"}, - }, - { - "type": "campaign_event", - "scheduled": fire2.scheduled.isoformat(), - "repeat_period": None, - "campaign": {"uuid": str(campaign.uuid), "name": "Reminders"}, - "flow": {"uuid": str(event2_flow.uuid), "name": "Reminder Flow"}, - }, - { - "type": "scheduled_trigger", - "scheduled": trigger2.schedule.next_fire.astimezone(tzone.utc).isoformat(), - "repeat_period": "M", - "flow": {"uuid": str(trigger2_flow.uuid), "name": "Favorites 2"}, - }, - ] - }, - response.json(), - ) - - # fires for archived campaigns shouldn't appear - campaign.archive(self.admin) - - response = self.requestView(schedule_url, self.admin) - self.assertEqual(3, len(response.json()["results"])) - - @mock_mailroom - def test_open_ticket(self, mr_mocks): - contact = self.create_contact("Joe", phone="+593979000111") - general = self.org.default_ticket_topic - open_url = reverse("contacts.contact_open_ticket", args=[contact.id]) - - self.assertRequestDisallowed(open_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch(open_url, 
[self.editor, self.admin], form_fields=("topic", "assignee", "note")) - - # can submit with no assignee - response = self.assertUpdateSubmit(open_url, self.admin, {"topic": general.id, "note": "Help", "assignee": ""}) - - # should have new ticket - ticket = contact.tickets.get() - self.assertEqual(general, ticket.topic) - self.assertIsNone(ticket.assignee) - - # and we're redirected to that ticket - self.assertRedirect(response, f"/ticket/all/open/{ticket.uuid}/") - - @mock_mailroom - def test_interrupt(self, mr_mocks): - contact = self.create_contact("Joe", phone="+593979000111") - other_org_contact = self.create_contact("Hans", phone="+593979123456", org=self.org2) - - read_url = reverse("contacts.contact_read", args=[contact.uuid]) - interrupt_url = reverse("contacts.contact_interrupt", args=[contact.id]) - - self.login(self.admin) - - # no interrupt option if not in a flow - response = self.client.get(read_url) - self.assertNotContains(response, interrupt_url) - - MockSessionWriter(contact, self.create_flow("Test")).wait().save() - MockSessionWriter(other_org_contact, self.create_flow("Test", org=self.org2)).wait().save() - - # now it's an option - self.assertContentMenu(read_url, self.admin, ["Edit", "Start Flow", "Open Ticket", "Interrupt"]) - - # can't interrupt if not logged in - self.client.logout() - response = self.client.post(interrupt_url, {"id": contact.id}) - self.assertLoginRedirect(response) - - self.login(self.user) - - # can't interrupt if just regular user - response = self.client.post(interrupt_url, {"id": contact.id}) - self.assertLoginRedirect(response) - - self.login(self.admin) - - response = self.client.post(interrupt_url, {"id": contact.id}) - self.assertEqual(302, response.status_code) - - contact.refresh_from_db() - self.assertIsNone(contact.current_flow) - - # can't interrupt contact in other org - interrupt_url = reverse("contacts.contact_interrupt", args=[other_org_contact.id]) - response = self.client.post(interrupt_url, {"id": other_org_contact.id}) - self.assertLoginRedirect(response) - - # contact should be unchanged - other_org_contact.refresh_from_db() - self.assertIsNotNone(other_org_contact.current_flow) - - def test_delete(self): - contact = self.create_contact("Joe", phone="+593979000111") - other_org_contact = self.create_contact("Hans", phone="+593979123456", org=self.org2) - - delete_url = reverse("contacts.contact_delete", args=[contact.id]) - - # can't delete if not logged in - response = self.client.post(delete_url, {"id": contact.id}) - self.assertLoginRedirect(response) - - self.login(self.user) - - # can't delete if just regular user - response = self.client.post(delete_url, {"id": contact.id}) - self.assertLoginRedirect(response) - - self.login(self.admin) - - response = self.client.post(delete_url, {"id": contact.id}) - self.assertEqual(302, response.status_code) - - contact.refresh_from_db() - self.assertFalse(contact.is_active) - - # can't delete contact in other org - delete_url = reverse("contacts.contact_delete", args=[other_org_contact.id]) - response = self.client.post(delete_url, {"id": other_org_contact.id}) - self.assertLoginRedirect(response) - - # contact should be unchanged - other_org_contact.refresh_from_db() - self.assertTrue(other_org_contact.is_active) - - @mock_mailroom - def test_start(self, mr_mocks): - sample_flows = list(self.org.flows.order_by("name")) - background_flow = self.get_flow("background") - self.get_flow("media_survey") - archived_flow = self.get_flow("color") - archived_flow.archive(self.admin) - - contact 
= self.create_contact("Joe", phone="+593979000111") - start_url = f"{reverse('flows.flow_start', args=[])}?flow={sample_flows[0].id}&c={contact.uuid}" - - self.assertRequestDisallowed(start_url, [None, self.user, self.agent]) - response = self.assertUpdateFetch(start_url, [self.editor, self.admin], form_fields=["flow", "contact_search"]) - - self.assertEqual([background_flow] + sample_flows, list(response.context["form"].fields["flow"].queryset)) - - # try to submit without specifying a flow - self.assertUpdateSubmit( - start_url, - self.admin, - data={}, - form_errors={"flow": "This field is required.", "contact_search": "This field is required."}, - object_unchanged=contact, - ) - - # submit with flow... - contact_search = dict(query=f"uuid='{contact.uuid}'", advanced=True) - self.assertUpdateSubmit( - start_url, self.admin, {"flow": background_flow.id, "contact_search": json.dumps(contact_search)} - ) - - # should now have a flow start - start = FlowStart.objects.get() - self.assertEqual(background_flow, start.flow) - self.assertEqual(contact_search["query"], start.query) - self.assertEqual({}, start.exclusions) - - # that has been queued to mailroom - self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[-1]["type"]) - - -class ContactGroupTest(TembaTest): - def setUp(self): - super().setUp() - - self.joe = self.create_contact("Joe Blow", phone="123", fields={"age": "17", "gender": "male"}) - self.frank = self.create_contact("Frank Smith", phone="1234") - self.mary = self.create_contact("Mary Mo", phone="345", fields={"age": "21", "gender": "female"}) - - def test_create_manual(self): - group = ContactGroup.create_manual(self.org, self.admin, "group one") - - self.assertEqual(group.org, self.org) - self.assertEqual(group.name, "group one") - self.assertEqual(group.created_by, self.admin) - self.assertEqual(group.status, ContactGroup.STATUS_READY) - - # can't call update_query on a manual group - self.assertRaises(AssertionError, group.update_query, "gender=M") - - # assert failure if group name is blank - self.assertRaises(AssertionError, ContactGroup.create_manual, self.org, self.admin, " ") - - @mock_mailroom - def test_create_smart(self, mr_mocks): - age = self.org.fields.get(key="age") - gender = self.org.fields.get(key="gender") - - # create a dynamic group using a query - query = '(Age < 18 and gender = "male") or (Age > 18 and gender = "female")' - - group = ContactGroup.create_smart(self.org, self.admin, "Group two", query) - group.refresh_from_db() - - self.assertEqual(query, group.query) - self.assertEqual({age, gender}, set(group.query_fields.all())) - self.assertEqual(ContactGroup.STATUS_INITIALIZING, group.status) - - # update group query - mr_mocks.contact_parse_query("age > 18 and name ~ Mary", cleaned='age > 18 AND name ~ "Mary"') - group.update_query("age > 18 and name ~ Mary") - group.refresh_from_db() - - self.assertEqual(group.query, 'age > 18 AND name ~ "Mary"') - self.assertEqual(set(group.query_fields.all()), {age}) - self.assertEqual(group.status, ContactGroup.STATUS_INITIALIZING) - - # try to update group query to something invalid - mr_mocks.exception(mailroom.QueryValidationException("no valid", "syntax")) - with self.assertRaises(ValueError): - group.update_query("age ~ Mary") - - # can't create a dynamic group with empty query - self.assertRaises(AssertionError, ContactGroup.create_smart, self.org, self.admin, "Empty", "") - - # can't create a dynamic group with id attribute - self.assertRaises(ValueError, ContactGroup.create_smart, self.org, 
self.admin, "Bose", "id = 123") - - # dynamic group should not have remove to group button - self.login(self.admin) - filter_url = reverse("contacts.contact_filter", args=[group.uuid]) - self.client.get(filter_url) - - # put group back into evaluation state - group.status = ContactGroup.STATUS_EVALUATING - group.save(update_fields=("status",)) - - # dynamic groups should get their own icon - self.assertEqual(group.get_attrs(), {"icon": "group_smart"}) - - # can't update query again while it is in this state - with self.assertRaises(AssertionError): - group.update_query("age = 18") - - def test_get_or_create(self): - group = ContactGroup.get_or_create(self.org, self.user, "first") - self.assertEqual(group.name, "first") - self.assertFalse(group.is_smart) - - # name look up is case insensitive - self.assertEqual(ContactGroup.get_or_create(self.org, self.user, "FIRST"), group) - - # fetching by id shouldn't modify original group - self.assertEqual(ContactGroup.get_or_create(self.org, self.user, "Kigali", uuid=group.uuid), group) - - group.refresh_from_db() - self.assertEqual(group.name, "first") - - @mock_mailroom - def test_get_groups(self, mr_mocks): - manual = ContactGroup.create_manual(self.org, self.admin, "Static") - deleted = ContactGroup.create_manual(self.org, self.admin, "Deleted") - deleted.is_active = False - deleted.save() - - open_tickets = self.org.groups.get(name="Open Tickets") - females = ContactGroup.create_smart(self.org, self.admin, "Females", "gender=F") - males = ContactGroup.create_smart(self.org, self.admin, "Males", "gender=M") - ContactGroup.objects.filter(id=males.id).update(status=ContactGroup.STATUS_READY) - - self.assertEqual(set(ContactGroup.get_groups(self.org)), {open_tickets, manual, females, males}) - self.assertEqual(set(ContactGroup.get_groups(self.org, manual_only=True)), {manual}) - self.assertEqual(set(ContactGroup.get_groups(self.org, ready_only=True)), {open_tickets, manual, males}) - - def test_get_unique_name(self): - self.assertEqual("Testers", ContactGroup.get_unique_name(self.org, "Testers")) - - # ensure checking against existing groups is case-insensitive - self.create_group("TESTERS", contacts=[]) - - self.assertEqual("Testers 2", ContactGroup.get_unique_name(self.org, "Testers")) - self.assertEqual("Testers", ContactGroup.get_unique_name(self.org2, "Testers")) # different org - - self.create_group("Testers 2", contacts=[]) - - self.assertEqual("Testers 3", ContactGroup.get_unique_name(self.org, "Testers")) - - # ensure we don't exceed the name length limit - self.create_group("X" * 64, contacts=[]) - - self.assertEqual(f"{'X' * 62} 2", ContactGroup.get_unique_name(self.org, "X" * 64)) - - @mock_mailroom - def test_member_count(self, mr_mocks): - group = self.create_group("Cool kids") - group.contacts.add(self.joe, self.frank) - - self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 2) - - group.contacts.add(self.mary) - - self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 3) - - group.contacts.remove(self.mary) - - self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 2) - - # blocking a contact removes them from all user groups - self.joe.block(self.user) - - group = ContactGroup.objects.get(pk=group.pk) - self.assertEqual(group.get_member_count(), 1) - self.assertEqual(set(group.contacts.all()), {self.frank}) - - # releasing removes from all user groups - self.frank.release(self.user) - - group = ContactGroup.objects.get(pk=group.pk) - 
self.assertEqual(group.get_member_count(), 0) - self.assertEqual(set(group.contacts.all()), set()) - - @mock_mailroom - def test_status_group_counts(self, mr_mocks): - # start with no contacts - for contact in Contact.objects.all(): - contact.release(self.admin) - contact.delete() - - counts = Contact.get_status_counts(self.org) - self.assertEqual( - counts, - { - Contact.STATUS_ACTIVE: 0, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - self.create_contact("Hannibal", phone="0783835001") - face = self.create_contact("Face", phone="0783835002") - ba = self.create_contact("B.A.", phone="0783835003") - murdock = self.create_contact("Murdock", phone="0783835004") - - counts = Contact.get_status_counts(self.org) - self.assertEqual( - counts, - { - Contact.STATUS_ACTIVE: 4, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - # call methods twice to check counts don't change the second time - murdock.block(self.user) - murdock.block(self.user) - face.block(self.user) - ba.stop(self.user) - ba.stop(self.user) - - counts = Contact.get_status_counts(self.org) - self.assertEqual( - counts, - { - Contact.STATUS_ACTIVE: 1, - Contact.STATUS_BLOCKED: 2, - Contact.STATUS_STOPPED: 1, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - murdock.release(self.user) - murdock.release(self.user) - face.restore(self.user) - face.restore(self.user) - ba.restore(self.user) - ba.restore(self.user) - - # squash all our counts; this shouldn't affect our overall counts, but we should now only have 3 - squash_group_counts() - self.assertEqual(ContactGroupCount.objects.all().count(), 3) - - counts = Contact.get_status_counts(self.org) - self.assertEqual( - counts, - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - # rebuild just our system contact group - all_contacts = self.org.active_contacts_group - ContactGroupCount.populate_for_group(all_contacts) - - # assert our count is correct - self.assertEqual(all_contacts.get_member_count(), 3) - self.assertEqual(ContactGroupCount.objects.filter(group=all_contacts).count(), 1) - - @mock_mailroom - def test_release(self, mr_mocks): - contact1 = self.create_contact("Bob", phone="+1234567111") - contact2 = self.create_contact("Jim", phone="+1234567222") - contact3 = self.create_contact("Jim", phone="+1234567333") - group1 = self.create_group("Group One", contacts=[contact1, contact2]) - group2 = self.create_group("Group Two", contacts=[contact2, contact3]) - - t1 = timezone.now() - - # create a campaign based on group 1 - a hard dependency - campaign = Campaign.create(self.org, self.admin, "Reminders", group1) - joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) - event = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") - EventFire.objects.create(event=event, contact=self.joe, scheduled=timezone.now() + timedelta(days=2)) - campaign.is_archived = True - campaign.save() - - # create scheduled and regular broadcasts which send to both groups - schedule = Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY) - bcast1 = self.create_broadcast(self.admin, {"eng": {"text": "Hi"}}, groups=[group1, group2], schedule=schedule) - bcast2 = self.create_broadcast(self.admin, {"eng": {"text": "Hi"}}, groups=[group1, group2]) - - # group still has a hard dependency so can't be released - with self.assertRaises(AssertionError): - 
group1.release(self.admin) - - campaign.delete() - - group1.release(self.admin) - group1.refresh_from_db() - - self.assertFalse(group1.is_active) - self.assertTrue(group1.name.startswith("deleted-")) - self.assertEqual(0, EventFire.objects.count()) # event fires will have been deleted - self.assertEqual({group2}, set(bcast1.groups.all())) # removed from scheduled broadcast - self.assertEqual({group1, group2}, set(bcast2.groups.all())) # regular broadcast unchanged - - self.assertEqual(set(), set(group1.contacts.all())) - self.assertEqual({contact2, contact3}, set(group2.contacts.all())) # unchanged - - # check that contacts who were in the group have had their modified_on times updated - contact1.refresh_from_db() - contact2.refresh_from_db() - contact3.refresh_from_db() - self.assertGreater(contact1.modified_on, t1) - self.assertGreater(contact2.modified_on, t1) - self.assertLess(contact3.modified_on, t1) # unchanged - - -class ContactGroupCRUDLTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.joe = self.create_contact("Joe Blow", phone="123") - self.frank = self.create_contact("Frank Smith", urns=["tel:1234", "twitter:hola"]) - - self.joe_and_frank = self.create_group("Customers", [self.joe, self.frank]) - - self.other_org_group = self.create_group("Customers", contacts=[], org=self.org2) - - @override_settings(ORG_LIMIT_DEFAULTS={"groups": 10}) - @mock_mailroom - def test_create(self, mr_mocks): - url = reverse("contacts.contactgroup_create") - - # can't create group as viewer - self.login(self.user) - response = self.client.post(url, {"name": "Spammers"}) - self.assertLoginRedirect(response) - - self.login(self.admin) - - # try to create a contact group whose name is only whitespace - response = self.client.post(url, {"name": " "}) - self.assertFormError(response.context["form"], "name", "This field is required.") - - # try to create a contact group whose name contains a disallowed character - response = self.client.post(url, {"name": '"People"'}) - self.assertFormError(response.context["form"], "name", 'Cannot contain the character: "') - - # try to create a contact group whose name is too long - response = self.client.post(url, {"name": "X" * 65}) - self.assertFormError( - response.context["form"], "name", "Ensure this value has at most 64 characters (it has 65)." 
- ) - - # try to create with name that's already taken - response = self.client.post(url, {"name": "Customers"}) - self.assertFormError(response.context["form"], "name", "Already used by another group.") - - # try to create with name that's already taken by a system group - response = self.client.post(url, {"name": "blocked"}) - self.assertFormError(response.context["form"], "name", "Already used by another group.") - - # create with valid name (that will be trimmed) - response = self.client.post(url, {"name": "first "}) - self.assertNoFormErrors(response) - ContactGroup.objects.get(org=self.org, name="first") - - # create a group with preselected contacts - self.client.post(url, {"name": "Everybody", "preselected_contacts": f"{self.joe.id},{self.frank.id}"}) - group = ContactGroup.objects.get(org=self.org, name="Everybody") - self.assertEqual(set(group.contacts.all()), {self.joe, self.frank}) - - # create a dynamic group using a query - self.client.post(url, {"name": "Frank", "group_query": "tel = 1234"}) - - ContactGroup.objects.get(org=self.org, name="Frank", query="tel = 1234") - - for group in ContactGroup.objects.filter(is_system=False): - group.release(self.admin) - - for i in range(10): - ContactGroup.create_manual(self.org2, self.admin2, "group%d" % i) - - response = self.client.post(url, {"name": "People"}) - self.assertNoFormErrors(response) - ContactGroup.objects.get(org=self.org, name="People") - - for group in ContactGroup.objects.filter(is_system=False): - group.release(self.admin) - - for i in range(10): - ContactGroup.create_manual(self.org, self.admin, "group%d" % i) - - self.assertEqual(10, ContactGroup.objects.filter(is_active=True, is_system=False).count()) - response = self.client.post(url, {"name": "People"}) - self.assertFormError( - response.context["form"], - "name", - "This workspace has reached its limit of 10 groups. 
You must delete existing ones before you can create new ones.", - ) - - def test_create_disallow_duplicates(self): - self.login(self.admin) - - self.client.post(reverse("contacts.contactgroup_create"), dict(name="First Group")) - - # assert it was created - ContactGroup.objects.get(name="First Group") - - # try to create another group with the same name, but a dynamic query, should fail - response = self.client.post( - reverse("contacts.contactgroup_create"), dict(name="First Group", group_query="firsts") - ) - self.assertFormError(response.context["form"], "name", "Already used by another group.") - - # try to create another group with the same name, not dynamic, same thing - response = self.client.post( - reverse("contacts.contactgroup_create"), dict(name="First Group") - ) - self.assertFormError(response.context["form"], "name", "Already used by another group.") - - @mock_mailroom - def test_update(self, mr_mocks): - url = reverse("contacts.contactgroup_update", args=[self.joe_and_frank.id]) - - open_tickets = self.org.groups.get(name="Open Tickets") - dynamic_group = self.create_group("Dynamic", query="tel is 1234") - - # can't update group as viewer - self.login(self.user) - response = self.client.post(url, dict(name="Spammers")) - self.assertLoginRedirect(response) - - self.login(self.admin) - - # try to update name to only whitespace - response = self.client.post(url, dict(name=" ")) - self.assertFormError(response.context["form"], "name", "This field is required.") - - # try to update name to contain a disallowed character - response = self.client.post(url, dict(name='"People"')) - self.assertFormError(response.context["form"], "name", 'Cannot contain the character: "') - - # update with valid name (that will be trimmed) - response = self.client.post(url, dict(name="new name ")) - self.assertNoFormErrors(response) - - self.joe_and_frank.refresh_from_db() - self.assertEqual(self.joe_and_frank.name, "new name") - - # now try a dynamic group - url = reverse("contacts.contactgroup_update", args=[dynamic_group.id]) - - # mark our group as ready - ContactGroup.objects.filter(id=dynamic_group.id).update(status=ContactGroup.STATUS_READY) - - # try to update both name and query; form should fail because the query is not parsable - mr_mocks.exception(mailroom.QueryValidationException("error at !", "syntax")) - response = self.client.post(url, dict(name="Frank", query="(!))!)")) - self.assertFormError(response.context["form"], "query", "Invalid query syntax.") - - # try to update a group with an invalid query - mr_mocks.exception(mailroom.QueryValidationException("error at >", "syntax")) - response = self.client.post(url, dict(name="Frank", query="name <> some_name")) - self.assertFormError(response.context["form"], "query", "Invalid query syntax.") - - # can't update to a query dependent on id - response = self.client.post(url, dict(name="Frank", query="id = 123")) - self.assertFormError( - response.context["form"], "query", 'You cannot create a smart group based on "id" or "group".' 
- ) - - response = self.client.post(url, dict(name="Frank", query='twitter = "hola"')) - - self.assertNoFormErrors(response) - - dynamic_group.refresh_from_db() - self.assertEqual(dynamic_group.query, 'twitter = "hola"') - - # mark our dynamic group as evaluating - dynamic_group.status = ContactGroup.STATUS_EVALUATING - dynamic_group.save(update_fields=("status",)) - - # and check we can't change the query while that is the case - response = self.client.post(url, dict(name="Frank", query='twitter = "hello"')) - self.assertFormError( - response.context["form"], "query", "You cannot update the query of a group that is evaluating." - ) - - # but can change the name - response = self.client.post(url, dict(name="Frank2", query='twitter = "hola"')) - self.assertNoFormErrors(response) - - dynamic_group.refresh_from_db() - self.assertEqual(dynamic_group.name, "Frank2") - - # try to update a system group - response = self.client.post( - reverse("contacts.contactgroup_update", args=[open_tickets.id]), {"name": "new name"} - ) - self.assertEqual(404, response.status_code) - self.assertTrue(self.org.groups.filter(name="Open Tickets").exists()) - - # try to update group in other org - response = self.client.post( - reverse("contacts.contactgroup_update", args=[self.other_org_group.id]), {"name": "new name"} - ) - self.assertLoginRedirect(response) - - # check group is unchanged - self.other_org_group.refresh_from_db() - self.assertEqual("Customers", self.other_org_group.name) - - def test_usages(self): - flow = self.get_flow("dependencies", name="Dependencies") - group = ContactGroup.objects.get(name="Cat Facts") - - campaign1 = Campaign.create(self.org, self.admin, "Planting Reminders", group) - campaign2 = Campaign.create(self.org, self.admin, "Deleted", group) - campaign2.is_active = False - campaign2.save(update_fields=("is_active",)) - - trigger1 = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - flow, - keywords=["test1"], - match_type=Trigger.MATCH_FIRST_WORD, - groups=[group], - ) - trigger2 = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - flow, - keywords=["test2"], - match_type=Trigger.MATCH_FIRST_WORD, - exclude_groups=[group], - ) - - usages_url = reverse("contacts.contactgroup_usages", args=[group.uuid]) - - self.assertRequestDisallowed(usages_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(usages_url, [self.user, self.editor, self.admin], context_object=group) - - self.assertEqual( - {"flow": [flow], "campaign": [campaign1], "trigger": [trigger1, trigger2]}, - {t: list(qs) for t, qs in response.context["dependents"].items()}, - ) - - def test_delete(self): - # create a group which isn't used by anything - group1 = self.create_group("Group 1", contacts=[]) - - # create a group which is used only by a flow (soft dependency) - group2 = self.create_group("Group 2", contacts=[]) - flow1 = self.create_flow("Flow 1") - flow1.group_dependencies.add(group2) - - # create a group which is used by a flow (soft) and a scheduled trigger (soft) - group3 = self.create_group("Group 3", contacts=[]) - flow2 = self.create_flow("Flow 2") - flow2.group_dependencies.add(group3) - schedule1 = Schedule.create(self.org, timezone.now() + timedelta(days=3), Schedule.REPEAT_DAILY) - trigger1 = Trigger.create( - self.org, - self.admin, - trigger_type=Trigger.TYPE_SCHEDULE, - flow=flow2, - keywords=["trigger1"], - match_type=Trigger.MATCH_FIRST_WORD, - groups=[group3.id], - schedule=schedule1, - ) - self.assertEqual(1, group3.triggers.count()) - 
self.assertEqual(trigger1, group3.triggers.get(is_active=True, keywords=trigger1.keywords)) - - # create a group which is used by a flow (soft), a trigger (soft), and a campaign (hard dependency) - group4 = self.create_group("Group 4", contacts=[]) - flow3 = self.create_flow("Flow 3") - flow3.group_dependencies.add(group4) - trigger2 = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - flow3, - keywords=["trigger2"], - match_type=Trigger.MATCH_FIRST_WORD, - groups=[group4], - ) - campaign1 = Campaign.create(self.org, self.admin, "Planting Reminders", group4) - - delete_group1_url = reverse("contacts.contactgroup_delete", args=[group1.uuid]) - delete_group2_url = reverse("contacts.contactgroup_delete", args=[group2.uuid]) - delete_group3_url = reverse("contacts.contactgroup_delete", args=[group3.uuid]) - delete_group4_url = reverse("contacts.contactgroup_delete", args=[group4.uuid]) - - self.assertRequestDisallowed(delete_group1_url, [None, self.user, self.agent, self.admin2]) - - # a group with no dependents can be deleted - response = self.assertDeleteFetch(delete_group1_url, [self.editor, self.admin]) - - self.assertEqual({}, response.context["soft_dependents"]) - self.assertEqual({}, response.context["hard_dependents"]) - self.assertContains(response, "You are about to delete") - self.assertContains(response, "There is no way to undo this. Are you sure?") - - self.assertDeleteSubmit(delete_group1_url, self.admin, object_deactivated=group1, success_status=200) - - # a group with only soft dependents can be deleted but we give warnings - response = self.assertDeleteFetch(delete_group2_url, [self.editor]) - - self.assertEqual({"flow"}, set(response.context["soft_dependents"].keys())) - self.assertEqual({}, response.context["hard_dependents"]) - self.assertContains(response, "is used by the following items but can still be deleted:") - self.assertContains(response, flow1.name) - self.assertContains(response, "There is no way to undo this. Are you sure?") - - self.assertDeleteSubmit(delete_group2_url, self.admin, object_deactivated=group2, success_status=200) - - # check that the flow is now marked as having issues - flow1.refresh_from_db() - self.assertTrue(flow1.has_issues) - self.assertNotIn(group2, flow1.field_dependencies.all()) - - # a group with only soft dependents can be deleted but we give warnings - response = self.assertDeleteFetch(delete_group3_url, [self.admin]) - - self.assertEqual({"flow", "trigger"}, set(response.context["soft_dependents"].keys())) - self.assertEqual({}, response.context["hard_dependents"]) - self.assertContains(response, "is used by the following items but can still be deleted:") - self.assertContains(response, flow2.name) - self.assertContains(response, f"Schedule → {flow2.name}") - self.assertContains(response, "There is no way to undo this. 
Are you sure?") - - self.assertDeleteSubmit(delete_group3_url, self.admin, object_deactivated=group3, success_status=200) - - # check that the flow is now marked as having issues - flow2.refresh_from_db() - self.assertTrue(flow2.has_issues) - self.assertNotIn(group3, flow2.field_dependencies.all()) - - # check that the trigger is released - trigger1.refresh_from_db() - self.assertFalse(trigger1.is_active) - - # a group with hard dependents can't be deleted - response = self.assertDeleteFetch(delete_group4_url, [self.admin]) - - self.assertEqual({"flow", "trigger"}, set(response.context["soft_dependents"].keys())) - self.assertEqual({"campaign"}, set(response.context["hard_dependents"].keys())) - self.assertContains(response, "can't be deleted as it is still used by the following items:") - self.assertContains(response, campaign1.name) - self.assertNotContains(response, "Delete") - - # check that the flow is not deleted - flow3.refresh_from_db() - self.assertTrue(flow3.is_active) - - # check that the trigger is not released - trigger2.refresh_from_db() - self.assertTrue(trigger2.is_active) - - # check that the campaign is not deleted - campaign1.refresh_from_db() - self.assertTrue(campaign1.is_active) - - -class ContactTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.user1 = self.create_user("nash") - - self.joe = self.create_contact(name="Joe Blow", urns=["twitter:blow80", "tel:+250781111111"]) - self.frank = self.create_contact(name="Frank Smith", phone="+250782222222") - self.billy = self.create_contact(name="Billy Nophone") - self.voldemort = self.create_contact(phone="+250768383383") - - # create an orphaned URN - ContactURN.objects.create( - org=self.org, scheme="tel", path="+250788888888", identity="tel:+250788888888", priority=50 - ) - - # create an deleted contact - self.jim = self.create_contact(name="Jim") - self.jim.release(self.user) - - # create contact in other org - self.other_org_contact = self.create_contact(name="Fred", phone="+250768111222", org=self.org2) - - def create_campaign(self): - # create a campaign with a future event and add joe - self.farmers = self.create_group("Farmers", [self.joe]) - self.reminder_flow = self.create_flow("Reminder Flow") - self.planting_date = self.create_field("planting_date", "Planting Date", value_type=ContactField.TYPE_DATETIME) - self.campaign = Campaign.create(self.org, self.admin, "Planting Reminders", self.farmers) - - # create af flow event - self.planting_reminder = CampaignEvent.create_flow_event( - self.org, - self.admin, - self.campaign, - relative_to=self.planting_date, - offset=0, - unit="D", - flow=self.reminder_flow, - delivery_hour=17, - ) - - # and a message event - self.message_event = CampaignEvent.create_message_event( - self.org, - self.admin, - self.campaign, - relative_to=self.planting_date, - offset=7, - unit="D", - message="Sent 7 days after planting date", - ) - - def test_contact_notes(self): - note_text = "This is note" - - # create 10 notes - for i in range(10): - self.joe.set_note(self.user, f"{note_text} {i+1}") - - notes = self.joe.notes.all().order_by("id") - - # we should only have five notes after pruning - self.assertEqual(5, notes.count()) - - # check that the oldest notes are the ones that were pruned - self.assertEqual("This is note 6", notes.first().text) - - @mock_mailroom - def test_block_and_stop(self, mr_mocks): - self.joe.block(self.admin) - self.joe.stop(self.admin) - self.joe.restore(self.admin) - - self.assertEqual( - [ - call(self.org, self.admin, [self.joe], 
[modifiers.Status(status="blocked")]), - call(self.org, self.admin, [self.joe], [modifiers.Status(status="stopped")]), - call(self.org, self.admin, [self.joe], [modifiers.Status(status="active")]), - ], - mr_mocks.calls["contact_modify"], - ) - - @mock_mailroom - def test_open_ticket(self, mock_contact_modify): - mock_contact_modify.return_value = {self.joe.id: {"contact": {}, "events": []}} - - ticket = self.joe.open_ticket( - self.admin, topic=self.org.default_ticket_topic, assignee=self.agent, note="Looks sus" - ) - - self.assertEqual(self.org.default_ticket_topic, ticket.topic) - self.assertEqual("Looks sus", ticket.events.get(event_type="O").note) - - @mock_mailroom - def test_interrupt(self, mr_mocks): - # noop when contact not in a flow - self.assertFalse(self.joe.interrupt(self.admin)) - - flow = self.create_flow("Test") - MockSessionWriter(self.joe, flow).wait().save() - - self.assertTrue(self.joe.interrupt(self.admin)) - - @mock_mailroom - def test_release(self, mr_mocks): - # create a contact with a message - old_contact = self.create_contact("Jose", phone="+12065552000") - self.create_incoming_msg(old_contact, "hola mundo") - urn = old_contact.get_urn() - self.create_channel_event(self.channel, urn.identity, ChannelEvent.TYPE_CALL_IN_MISSED) - - self.create_ticket(old_contact) - - ivr_flow = self.get_flow("ivr") - msg_flow = self.get_flow("favorites_v13") - - self.create_incoming_call(msg_flow, old_contact) - - # steal his urn into a new contact - contact = self.create_contact("Joe", urns=["twitter:tweettweet"], fields={"gender": "Male", "age": 40}) - urn.contact = contact - urn.save(update_fields=("contact",)) - group = self.create_group("Test Group", contacts=[contact]) - - contact2 = self.create_contact("Billy", urns=["tel:1234567"]) - - # create scheduled and regular broadcasts which send to both contacts - schedule = Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY) - bcast1 = self.create_broadcast( - self.admin, {"eng": {"text": "Test"}}, contacts=[contact, contact2], schedule=schedule - ) - bcast2 = self.create_broadcast(self.admin, {"eng": {"text": "Test"}}, contacts=[contact, contact2]) - - flow_nodes = msg_flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - name_prompt = flow_nodes[6] - name_split = flow_nodes[7] - - ( - MockSessionWriter(contact, msg_flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "red")) - .visit(beer_prompt) - .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "primus")) - .visit(name_prompt) - .send_msg("Lastly, what is your name?", self.channel) - .visit(name_split) - .wait() - .save() - ) - - campaign = Campaign.create(self.org, self.admin, "Reminders", group) - joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) - event = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") - EventFire.objects.create(event=event, contact=contact, scheduled=timezone.now() + timedelta(days=2)) - - self.create_incoming_call(msg_flow, contact) - - # give contact an open and a closed ticket - self.create_ticket(contact) - self.create_ticket(contact, closed_on=timezone.now()) - - self.assertEqual(1, group.contacts.all().count()) - self.assertEqual(1, contact.calls.all().count()) - self.assertEqual(2, contact.addressed_broadcasts.all().count()) - self.assertEqual(2, contact.urns.all().count()) - self.assertEqual(2, contact.runs.all().count()) - self.assertEqual(7, contact.msgs.all().count()) - self.assertEqual(2, len(contact.fields)) - self.assertEqual(1, contact.campaign_fires.count()) - - self.assertEqual(2, TicketCount.get_all(self.org, Ticket.STATUS_OPEN)) - self.assertEqual(1, TicketCount.get_all(self.org, Ticket.STATUS_CLOSED)) - - # first try releasing with _full_release patched so we can check the state of the contact before the task - # to do a full release has kicked off - with patch("temba.contacts.models.Contact._full_release"): - contact.release(self.admin) - - self.assertEqual(2, contact.urns.all().count()) - for urn in contact.urns.all(): - UUID(urn.path, version=4) - self.assertEqual(URN.DELETED_SCHEME, urn.scheme) - - # tickets unchanged - self.assertEqual(2, contact.tickets.count()) - - # a new contact arrives with those urns - new_contact = self.create_contact("URN Thief", urns=["tel:+12065552000", "twitter:tweettweet"]) - self.assertEqual(2, new_contact.urns.all().count()) - - self.assertEqual({contact2}, set(bcast1.contacts.all())) - self.assertEqual({contact, contact2}, set(bcast2.contacts.all())) - - # now lets go for a full release - contact.release(self.admin) - - contact.refresh_from_db() - self.assertEqual(0, group.contacts.all().count()) - self.assertEqual(0, contact.calls.all().count()) - self.assertEqual(0, contact.addressed_broadcasts.all().count()) - self.assertEqual(0, contact.urns.all().count()) - self.assertEqual(0, contact.runs.all().count()) - self.assertEqual(0, contact.msgs.all().count()) - self.assertEqual(0, contact.campaign_fires.count()) - - # tickets deleted (only for this contact) - self.assertEqual(0, contact.tickets.count()) - self.assertEqual(1, TicketCount.get_all(self.org, Ticket.STATUS_OPEN)) - self.assertEqual(0, TicketCount.get_all(self.org, Ticket.STATUS_CLOSED)) - - # contact who used to own our urn had theirs released too - self.assertEqual(0, old_contact.calls.all().count()) - self.assertEqual(0, old_contact.msgs.all().count()) - - self.assertIsNone(contact.fields) - self.assertIsNone(contact.name) - - # nope, we aren't paranoid or anything - Org.objects.get(id=self.org.id) - Flow.objects.get(id=msg_flow.id) - Flow.objects.get(id=ivr_flow.id) - self.assertEqual(1, Ticket.objects.count()) - - @mock_mailroom - def test_status_changes_and_release(self, mr_mocks): - flow = self.create_flow("Test") - msg1 = self.create_incoming_msg(self.joe, "Test 1") - msg2 = self.create_incoming_msg(self.joe, "Test 2", 
flow=flow) - msg3 = self.create_incoming_msg(self.joe, "Test 3", visibility="A") - label = self.create_label("Interesting") - label.toggle_label([msg1, msg2, msg3], add=True) - static_group = self.create_group("Just Joe", [self.joe]) - - msg_counts = SystemLabel.get_counts(self.org) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_INBOX]) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_FLOWS]) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_ARCHIVED]) - - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 4, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - self.assertEqual(set(label.msgs.all()), {msg1, msg2, msg3}) - self.assertEqual(set(static_group.contacts.all()), {self.joe}) - - self.joe.stop(self.user) - - # check that joe is now stopped - self.joe = Contact.objects.get(pk=self.joe.pk) - self.assertEqual(Contact.STATUS_STOPPED, self.joe.status) - self.assertTrue(self.joe.is_active) - - # and added to the stopped group - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 1, - Contact.STATUS_ARCHIVED: 0, - }, - ) - self.assertEqual(set(static_group.contacts.all()), set()) - - self.joe.block(self.user) - - # check that joe is now blocked instead of stopped - self.joe.refresh_from_db() - self.assertEqual(Contact.STATUS_BLOCKED, self.joe.status) - self.assertTrue(self.joe.is_active) - - # and that he's been removed from the active and stopped groups, and added to the blocked group - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 1, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - # and removed from all groups - self.assertEqual(set(static_group.contacts.all()), set()) - - # but his messages are unchanged - self.assertEqual(2, Msg.objects.filter(contact=self.joe, visibility="V").count()) - msg_counts = SystemLabel.get_counts(self.org) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_INBOX]) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_FLOWS]) - self.assertEqual(1, msg_counts[SystemLabel.TYPE_ARCHIVED]) - - self.joe.archive(self.admin) - - # check that joe is now archived - self.joe.refresh_from_db() - self.assertEqual(Contact.STATUS_ARCHIVED, self.joe.status) - self.assertTrue(self.joe.is_active) - - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 1, - }, - ) - - self.joe.restore(self.admin) - - # check that joe is now neither blocked nor stopped - self.joe = Contact.objects.get(pk=self.joe.pk) - self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status) - self.assertTrue(self.joe.is_active) - - # and that he's been removed from the blocked group, and put back in the active group - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 4, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - self.joe.release(self.user) - - # check that joe has been released (doesn't change his status) - self.joe = Contact.objects.get(pk=self.joe.pk) - self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status) - self.assertFalse(self.joe.is_active) - - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) 
- - # joe's messages should be inactive, blank and have no labels - self.assertEqual(0, Msg.objects.filter(contact=self.joe, visibility="V").count()) - self.assertEqual(0, Msg.objects.filter(contact=self.joe).exclude(text="").count()) - self.assertEqual(0, label.msgs.count()) - - msg_counts = SystemLabel.get_counts(self.org) - self.assertEqual(0, msg_counts[SystemLabel.TYPE_INBOX]) - self.assertEqual(0, msg_counts[SystemLabel.TYPE_FLOWS]) - self.assertEqual(0, msg_counts[SystemLabel.TYPE_ARCHIVED]) - - # and he shouldn't be in any groups - self.assertEqual(set(static_group.contacts.all()), set()) - - # or have any URNs - self.assertEqual(0, ContactURN.objects.filter(contact=self.joe).count()) - - # blocking and stopping an inactive contact won't change groups - self.joe.block(self.user) - self.joe.stop(self.user) - - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 0, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - # we don't let users undo releasing a contact... but if we have to do it for some reason - self.joe.is_active = True - self.joe.save(update_fields=("is_active",)) - - # check joe goes into the appropriate groups - self.assertEqual( - Contact.get_status_counts(self.org), - { - Contact.STATUS_ACTIVE: 3, - Contact.STATUS_BLOCKED: 0, - Contact.STATUS_STOPPED: 1, - Contact.STATUS_ARCHIVED: 0, - }, - ) - - def test_contact_display(self): - self.assertEqual("Joe Blow", self.joe.get_display(org=self.org, formatted=False)) - self.assertEqual("Joe Blow", self.joe.get_display()) - self.assertEqual("+250768383383", self.voldemort.get_display(org=self.org, formatted=False)) - self.assertEqual("0768 383 383", self.voldemort.get_display()) - self.assertEqual("Billy Nophone", self.billy.get_display()) - - self.assertEqual("0781 111 111", self.joe.get_urn_display(scheme=URN.TEL_SCHEME)) - self.assertEqual("blow80", self.joe.get_urn_display(org=self.org, formatted=False)) - self.assertEqual("blow80", self.joe.get_urn_display()) - self.assertEqual("+250768383383", self.voldemort.get_urn_display(org=self.org, formatted=False)) - self.assertEqual( - "+250768383383", self.voldemort.get_urn_display(org=self.org, formatted=False, international=True) - ) - self.assertEqual("+250 768 383 383", self.voldemort.get_urn_display(org=self.org, international=True)) - self.assertEqual("0768 383 383", self.voldemort.get_urn_display()) - self.assertEqual("", self.billy.get_urn_display()) - - self.assertEqual("Joe Blow", str(self.joe)) - self.assertEqual("0768 383 383", str(self.voldemort)) - self.assertEqual("Billy Nophone", str(self.billy)) - - with self.anonymous(self.org): - self.assertEqual("Joe Blow", self.joe.get_display(org=self.org, formatted=False)) - self.assertEqual("Joe Blow", self.joe.get_display()) - self.assertEqual("%010d" % self.voldemort.pk, self.voldemort.get_display()) - self.assertEqual("Billy Nophone", self.billy.get_display()) - - self.assertEqual(ContactURN.ANON_MASK, self.joe.get_urn_display(org=self.org, formatted=False)) - self.assertEqual(ContactURN.ANON_MASK, self.joe.get_urn_display()) - self.assertEqual(ContactURN.ANON_MASK, self.voldemort.get_urn_display()) - self.assertEqual("", self.billy.get_urn_display()) - self.assertEqual("", self.billy.get_urn_display(scheme=URN.TEL_SCHEME)) - - self.assertEqual("Joe Blow", str(self.joe)) - self.assertEqual("%010d" % self.voldemort.pk, str(self.voldemort)) - self.assertEqual("Billy Nophone", str(self.billy)) - - def test_bulk_urn_cache_initialize(self): - 
self.joe.refresh_from_db() - self.billy.refresh_from_db() - - contacts = (self.joe, self.frank, self.billy) - Contact.bulk_urn_cache_initialize(contacts) - - with self.assertNumQueries(0): - self.assertEqual(["twitter:blow80", "tel:+250781111111"], [u.urn for u in self.joe.get_urns()]) - self.assertEqual(["twitter:blow80", "tel:+250781111111"], [u.urn for u in getattr(self.joe, "_urns_cache")]) - self.assertEqual(["tel:+250782222222"], [u.urn for u in self.frank.get_urns()]) - self.assertEqual([], [u.urn for u in self.billy.get_urns()]) - - @mock_mailroom - def test_bulk_inspect(self, mr_mocks): - self.assertEqual({}, Contact.bulk_inspect([])) - self.assertEqual( - { - self.joe: { - "urns": [ - { - "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"}, - "scheme": "tel", - "path": "+250781111111", - "display": "", - }, - {"channel": None, "scheme": "twitter", "path": "blow80", "display": ""}, - ] - }, - self.billy: {"urns": []}, - }, - Contact.bulk_inspect([self.joe, self.billy]), - ) - - @mock_mailroom - def test_omnibox(self, mr_mocks): - omnibox_url = reverse("contacts.contact_omnibox") - - # add a group with members and an empty group - self.create_field("gender", "Gender") - open_tickets = self.org.groups.get(name="Open Tickets") - joe_and_frank = self.create_group("Joe and Frank", [self.joe, self.frank]) - nobody = self.create_group("Nobody", []) - - men = self.create_group("Men", query="gender=M") - ContactGroup.objects.filter(id=men.id).update(status=ContactGroup.STATUS_READY) - - # a group which is being re-evaluated and shouldn't appear in any omnibox results - unready = self.create_group("Group being re-evaluated...", query="gender=M") - unready.status = ContactGroup.STATUS_EVALUATING - unready.save(update_fields=("status",)) - - # Postgres will defer to strcoll for ordering which even for en_US.UTF-8 will return different results on OSX - # and Ubuntu. 
To keep ordering consistent for this test, we don't let URNs start with + - # (see http://postgresql.nabble.com/a-strange-order-by-behavior-td4513038.html) - ContactURN.objects.filter(path__startswith="+").update( - path=Substr("path", 2), identity=Concat(DbValue("tel:"), Substr("path", 2)) - ) - - self.login(self.admin) - - def omnibox_request(query: str): - response = self.client.get(omnibox_url + query) - return response.json()["results"] - - # mock mailroom to return an error - mr_mocks.exception(mailroom.QueryValidationException("ooh that doesn't look right", "syntax")) - - # error is swallowed and we show no results - self.assertEqual([], omnibox_request("?search=-123`213")) - - # lookup specific contacts - self.assertEqual( - [ - {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact", "urn": ""}, - {"id": str(self.joe.uuid), "name": "Joe Blow", "type": "contact", "urn": "blow80"}, - ], - omnibox_request(f"?c={self.joe.uuid},{self.billy.uuid}"), - ) - - # lookup specific groups - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, - ], - omnibox_request(f"?g={joe_and_frank.uuid},{men.uuid}"), - ) - - # empty query just returns up to 25 groups A-Z - with self.assertNumQueries(10): - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, - {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, - {"id": str(open_tickets.uuid), "name": "Open Tickets", "type": "group", "count": 0}, - ], - omnibox_request(""), - ) - - with self.assertNumQueries(13): - mr_mocks.contact_search(query='name ~ "250" OR urn ~ "250"', total=2, contacts=[self.billy, self.frank]) - - self.assertEqual( - [ - {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact", "urn": ""}, - {"id": str(self.frank.uuid), "name": "Frank Smith", "type": "contact", "urn": "250782222222"}, - ], - omnibox_request("?search=250"), - ) - - with self.assertNumQueries(14): - mr_mocks.contact_search(query='name ~ "FRA" OR urn ~ "FRA"', total=1, contacts=[self.frank]) - - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(self.frank.uuid), "name": "Frank Smith", "type": "contact", "urn": "250782222222"}, - ], - omnibox_request("?search=FRA"), - ) - - # specify type filter g (all groups) - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, - {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, - {"id": str(open_tickets.uuid), "name": "Open Tickets", "type": "group", "count": 0}, - ], - omnibox_request("?types=g"), - ) - - # specify type filter s (non-query groups) - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, - ], - omnibox_request("?types=s"), - ) - - with self.anonymous(self.org): - self.assertEqual( - [ - {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, - {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, - {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, - {"id": str(open_tickets.uuid), "name": "Open 
Tickets", "type": "group", "count": 0}, - ], - omnibox_request(""), - ) - - mr_mocks.contact_search(query='name ~ "Billy"', total=1, contacts=[self.billy]) - - self.assertEqual( - [ - {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact"}, - ], - omnibox_request("?search=Billy"), - ) - - # exclude blocked and stopped contacts - self.joe.block(self.admin) - self.frank.stop(self.admin) - - # lookup by contact uuids - self.assertEqual(omnibox_request("?c=%s,%s" % (self.joe.uuid, self.frank.uuid)), []) - - def test_history(self): - url = reverse("contacts.contact_history", args=[self.joe.uuid]) - - kurt = self.create_contact("Kurt", phone="123123") - self.joe.created_on = timezone.now() - timedelta(days=1000) - self.joe.save(update_fields=("created_on",)) - - self.create_broadcast(self.user, {"eng": {"text": "A beautiful broadcast"}}, contacts=[self.joe]) - self.create_campaign() - - # add a message with some attachments - self.create_incoming_msg( - self.joe, - "Message caption", - created_on=timezone.now(), - attachments=[ - "audio/mp3:http://blah/file.mp3", - "video/mp4:http://blah/file.mp4", - "geo:47.5414799,-122.6359908", - ], - ) - - # create some messages - for i in range(94): - self.create_incoming_msg( - self.joe, "Inbound message %d" % i, created_on=timezone.now() - timedelta(days=(100 - i)) - ) - - # because messages are stored with timestamps from external systems, possible to have initial message - # which is little bit older than the contact itself - self.create_incoming_msg( - self.joe, "Very old inbound message", created_on=self.joe.created_on - timedelta(seconds=10) - ) - - flow = self.get_flow("color_v13") - nodes = flow.get_definition()["nodes"] - color_prompt = nodes[0] - color_split = nodes[4] - - ( - MockSessionWriter(self.joe, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .call_webhook("POST", "https://example.com/", "1234") # pretend that flow run made a webhook request - .visit(color_split) - .set_result("Color", "green", "Green", "I like green") - .wait() - .save() - ) - ( - MockSessionWriter(kurt, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ) - - # mark an outgoing message as failed - failed = Msg.objects.filter(direction="O", contact=self.joe).last() - failed.status = "F" - failed.save(update_fields=("status",)) - - # create an airtime transfer - AirtimeTransfer.objects.create( - org=self.org, - status="S", - contact=self.joe, - currency="RWF", - desired_amount=Decimal("100"), - actual_amount=Decimal("100"), - ) - - # create an event from the past - scheduled = timezone.now() - timedelta(days=5) - EventFire.objects.create(event=self.planting_reminder, contact=self.joe, scheduled=scheduled, fired=scheduled) - - # two tickets for joe - sales = Topic.create(self.org, self.admin, "Sales") - self.create_ticket(self.joe, opened_on=timezone.now(), closed_on=timezone.now()) - ticket = self.create_ticket(self.joe, topic=sales) - - # create missed incoming and outgoing calls - self.create_channel_event( - self.channel, str(self.joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_CALL_OUT_MISSED, extra={} - ) - self.create_channel_event( - self.channel, str(self.joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_CALL_IN_MISSED, extra={} - ) - - # and a referral event - self.create_channel_event( - self.channel, str(self.joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_NEW_CONVERSATION, extra={} - ) - - # add a failed call - 
Call.objects.create(
- contact=self.joe,
- status=Call.STATUS_ERRORED,
- error_reason=Call.ERROR_NOANSWER,
- channel=self.channel,
- org=self.org,
- contact_urn=self.joe.urns.all().first(),
- error_count=0,
- )
-
- # add a note to our open ticket
- ticket.events.create(
- org=self.org,
- contact=ticket.contact,
- event_type="N",
- note="I have a bad feeling about this",
- created_by=self.admin,
- )
-
- # create an assignment
- ticket.events.create(
- org=self.org,
- contact=ticket.contact,
- event_type="A",
- created_by=self.admin,
- assignee=self.admin,
- )
-
- # set an output URL on our session so we fetch from there
- s = FlowSession.objects.get(contact=self.joe)
- s3.client().put_object(
- Bucket="test-sessions", Key="c/session.json", Body=io.BytesIO(json.dumps(s.output).encode())
- )
- FlowSession.objects.filter(id=s.id).update(output_url="http://minio:9000/test-sessions/c/session.json")
-
- # fetch our contact history
- self.login(self.admin)
- with self.assertNumQueries(27):
- response = self.client.get(url + "?limit=100")
-
- # history should include all messages in the last 90 days, the channel events, the call, and the flow run
- history = response.json()["events"]
- self.assertEqual(96, len(history))
-
- def assertHistoryEvent(events, index, expected_type, **kwargs):
- item = events[index]
- self.assertEqual(expected_type, item["type"], f"event type mismatch for item {index}")
- self.assertTrue(iso8601.parse_date(item["created_on"])) # check created_on exists and is ISO string
-
- for path, expected in kwargs.items():
- self.assertPathValue(item, path, expected, f"item {index}")
-
- assertHistoryEvent(history, 0, "call_started", status="E", status_display="Errored (No Answer)")
- assertHistoryEvent(history, 1, "channel_event", channel_event_type="new_conversation")
- assertHistoryEvent(history, 2, "channel_event", channel_event_type="mo_miss")
- assertHistoryEvent(history, 3, "channel_event", channel_event_type="mt_miss")
- assertHistoryEvent(history, 4, "ticket_opened", ticket__topic__name="Sales")
- assertHistoryEvent(history, 5, "ticket_closed", ticket__topic__name="General")
- assertHistoryEvent(history, 6, "ticket_opened", ticket__topic__name="General")
- assertHistoryEvent(history, 7, "airtime_transferred", actual_amount="100.00")
- assertHistoryEvent(history, 8, "msg_created", msg__text="What is your favorite color?")
- assertHistoryEvent(history, 9, "flow_entered", flow__name="Colors")
- assertHistoryEvent(history, 10, "msg_received", msg__text="Message caption")
- assertHistoryEvent(
- history, 11, "msg_created", msg__text="A beautiful broadcast", created_by__email="viewer@nyaruka.com"
- )
- assertHistoryEvent(history, 12, "campaign_fired", campaign__name="Planting Reminders")
- assertHistoryEvent(history, -1, "msg_received", msg__text="Inbound message 11")
-
- # revert back to reading only from DB
- FlowSession.objects.filter(id=s.id).update(output_url=None)
-
- # can filter by ticket to see only events from that ticket rather than events from all tickets
- response = self.client.get(url + f"?ticket={ticket.uuid}&limit=100")
- history = response.json()["events"]
- assertHistoryEvent(history, 0, "ticket_assigned", assignee__id=self.admin.id)
- assertHistoryEvent(history, 1, "ticket_note_added", note="I have a bad feeling about this")
- assertHistoryEvent(history, 5, "channel_event", channel_event_type="mt_miss")
- assertHistoryEvent(history, 6, "ticket_opened", ticket__topic__name="Sales")
- assertHistoryEvent(history, 7, "airtime_transferred", 
actual_amount="100.00") - - # fetch next page - before = datetime_to_timestamp(timezone.now() - timedelta(days=90)) - response = self.fetch_protected(url + "?limit=100&before=%d" % before, self.admin) - self.assertFalse(response.json()["has_older"]) - - # activity should include 11 remaining messages and the event fire - history = response.json()["events"] - self.assertEqual(12, len(history)) - assertHistoryEvent(history, 0, "msg_received", msg__text="Inbound message 10") - assertHistoryEvent(history, 10, "msg_received", msg__text="Inbound message 0") - assertHistoryEvent(history, 11, "msg_received", msg__text="Very old inbound message") - - response = self.fetch_protected(url + "?limit=100", self.admin) - history = response.json()["events"] - - self.assertEqual(96, len(history)) - assertHistoryEvent(history, 8, "msg_created", msg__text="What is your favorite color?") - - # if a new message comes in - self.create_incoming_msg(self.joe, "Newer message") - response = self.fetch_protected(url, self.admin) - - # now we'll see the message that just came in first, followed by the call event - history = response.json()["events"] - assertHistoryEvent(history, 0, "msg_received", msg__text="Newer message") - assertHistoryEvent(history, 1, "call_started", status="E", status_display="Errored (No Answer)") - - recent_start = datetime_to_timestamp(timezone.now() - timedelta(days=1)) - response = self.fetch_protected(url + "?limit=100&after=%s" % recent_start, self.admin) - - # with our recent flag on, should not see the older messages - events = response.json()["events"] - self.assertEqual(13, len(events)) - self.assertContains(response, "file.mp4") - - # can't view history of contact in another org - hans = self.create_contact("Hans", urns=["twitter:hans"], org=self.org2) - response = self.client.get(reverse("contacts.contact_history", args=[hans.uuid])) - self.assertLoginRedirect(response) - - # invalid UUID should return 404 - response = self.client.get(reverse("contacts.contact_history", args=["bad-uuid"])) - self.assertEqual(response.status_code, 404) - - # add a new run - ( - MockSessionWriter(self.joe, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ) - - response = self.fetch_protected(url + "?limit=200", self.admin) - history = response.json()["events"] - self.assertEqual(100, len(history)) - - # before date should not match our last activity, that only happens when we truncate - resp_json = response.json() - self.assertNotEqual( - resp_json["next_before"], - datetime_to_timestamp(iso8601.parse_date(resp_json["events"][-1]["created_on"])), - ) - - assertHistoryEvent(history, 0, "msg_created", msg__text="What is your favorite color?") - assertHistoryEvent(history, 1, "flow_entered") - assertHistoryEvent(history, 2, "flow_exited") - assertHistoryEvent(history, 3, "msg_received", msg__text="Newer message") - assertHistoryEvent(history, 4, "call_started") - assertHistoryEvent(history, 5, "channel_event") - assertHistoryEvent(history, 6, "channel_event") - assertHistoryEvent(history, 7, "channel_event") - assertHistoryEvent(history, 8, "ticket_opened") - assertHistoryEvent(history, 9, "ticket_closed") - assertHistoryEvent(history, 10, "ticket_opened") - assertHistoryEvent(history, 11, "airtime_transferred") - assertHistoryEvent(history, 12, "msg_created", msg__text="What is your favorite color?") - assertHistoryEvent(history, 13, "flow_entered") - - # make our message event older than our planting reminder - 
self.message_event.created_on = self.planting_reminder.created_on - timedelta(days=1) - self.message_event.save() - - # but fire it immediately - scheduled = timezone.now() - EventFire.objects.create(event=self.message_event, contact=self.joe, scheduled=scheduled, fired=scheduled) - - # when fetched with limit of 1, it should be the only event we see - response = self.fetch_protected( - url + "?limit=1&before=%d" % datetime_to_timestamp(scheduled + timedelta(minutes=5)), self.admin - ) - assertHistoryEvent(response.json()["events"], 0, "campaign_fired", campaign_event__id=self.message_event.id) - - # now try the proper max history to test truncation - response = self.fetch_protected(url + "?before=%d" % datetime_to_timestamp(timezone.now()), self.admin) - - # our before should be the same as the last item - resp_json = response.json() - last_item_date = datetime_to_timestamp(iso8601.parse_date(resp_json["events"][-1]["created_on"])) - self.assertEqual(resp_json["next_before"], last_item_date) - - # and our after should be 90 days earlier - self.assertEqual(resp_json["next_after"], last_item_date - (90 * 24 * 60 * 60 * 1000 * 1000)) - self.assertEqual(50, len(resp_json["events"])) - - # and we should have a marker for older items - self.assertTrue(resp_json["has_older"]) - - # can't view history of contact in other org - response = self.client.get(reverse("contacts.contact_history", args=[self.other_org_contact.uuid])) - self.assertLoginRedirect(response) - - def test_history_session_events(self): - flow = self.get_flow("color_v13") - nodes = flow.get_definition()["nodes"] - ( - MockSessionWriter(self.joe, flow) - .visit(nodes[0]) - .add_contact_urn("twitter", "joey") - .set_contact_field("gender", "Gender", "M") - .set_contact_field("age", "Age", "") - .set_contact_language("spa") - .set_contact_language("") - .set_contact_name("Joe") - .set_contact_name("") - .set_result("Color", "red", "Red", "it's red") - .send_email(["joe@nyaruka.com"], "Test", "Hello there Joe") - .error("unable to send email") - .fail("this is a failure") - .save() - ) - - history_url = reverse("contacts.contact_history", args=[self.joe.uuid]) - self.login(self.user) - - response = self.client.get(history_url) - self.assertEqual(200, response.status_code) - - resp_json = response.json() - self.assertEqual(9, len(resp_json["events"])) - self.assertEqual( - [ - "flow_exited", - "contact_name_changed", - "contact_name_changed", - "contact_language_changed", - "contact_language_changed", - "contact_field_changed", - "contact_field_changed", - "contact_urns_changed", - "flow_entered", - ], - [e["type"] for e in resp_json["events"]], - ) - - def test_msg_status_badge(self): - msg = self.create_outgoing_msg(self.joe, "This is an outgoing message") - - # wired has a primary color check - msg.status = Msg.STATUS_WIRED - self.assertIn('"check"', msg_status_badge(msg)) - self.assertIn("--color-primary-dark", msg_status_badge(msg)) - - # delivered has a success check - msg.status = Msg.STATUS_DELIVERED - self.assertIn('"check"', msg_status_badge(msg)) - self.assertIn("--success-rgb", msg_status_badge(msg)) - - # errored show retrying icon - msg.status = Msg.STATUS_ERRORED - self.assertIn('"retry"', msg_status_badge(msg)) - - # failed messages show an x - msg.status = Msg.STATUS_FAILED - self.assertIn('"x"', msg_status_badge(msg)) - - def test_get_scheduled_messages(self): - just_joe = self.create_group("Just Joe", [self.joe]) - - self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) - - broadcast = 
self.create_broadcast(self.admin, {"eng": {"text": "Hello"}}, contacts=[self.frank])
- self.assertEqual(0, self.joe.get_scheduled_broadcasts().count())
-
- broadcast.contacts.add(self.joe)
-
- self.assertEqual(0, self.joe.get_scheduled_broadcasts().count())
-
- schedule_time = timezone.now() + timedelta(days=2)
- broadcast.schedule = Schedule.create(self.org, schedule_time, Schedule.REPEAT_NEVER)
- broadcast.save(update_fields=("schedule",))
-
- self.assertEqual(self.joe.get_scheduled_broadcasts().count(), 1)
- self.assertIn(broadcast, self.joe.get_scheduled_broadcasts())
-
- broadcast.contacts.remove(self.joe)
- self.assertEqual(0, self.joe.get_scheduled_broadcasts().count())
-
- broadcast.groups.add(just_joe)
- self.assertEqual(self.joe.get_scheduled_broadcasts().count(), 1)
- self.assertIn(broadcast, self.joe.get_scheduled_broadcasts())
-
- broadcast.groups.remove(just_joe)
- self.assertEqual(0, self.joe.get_scheduled_broadcasts().count())
-
- def test_update_urns_field(self):
- update_url = reverse("contacts.contact_update", args=[self.joe.pk])
-
- # we have a field to add new urns
- response = self.fetch_protected(update_url, self.admin)
- self.assertEqual(self.joe, response.context["object"])
- self.assertContains(response, "Add Connection")
-
- # no field to add new urns for anon org
- with self.anonymous(self.org):
- response = self.fetch_protected(update_url, self.admin)
- self.assertEqual(self.joe, response.context["object"])
- self.assertNotContains(response, "Add Connection")
-
- @mock_mailroom
- def test_contacts_search(self, mr_mocks):
- search_url = reverse("contacts.contact_search")
- self.login(self.admin)
-
- mr_mocks.contact_search("Frank", cleaned='name ~ "Frank"', contacts=[self.frank])
-
- response = self.client.get(search_url + "?search=Frank")
- self.assertEqual(200, response.status_code)
- results = response.json()
-
- # check that we get a total and a sample
- self.assertEqual(1, results["total"])
- self.assertEqual(1, len(results["sample"]))
- self.assertEqual("+250 782 222 222", results["sample"][0]["primary_urn_formatted"])
-
- # our query should get expanded into a proper query
- self.assertEqual('name ~ "Frank"', results["query"])
-
- # check no primary urn
- self.frank.urns.all().delete()
- response = self.client.get(search_url + "?search=Frank")
- self.assertEqual(200, response.status_code)
- results = response.json()
- self.assertEqual("--", results["sample"][0]["primary_urn_formatted"])
-
- # no query, no results
- response = self.client.get(search_url)
- results = response.json()
- self.assertEqual(0, results["total"])
-
- mr_mocks.exception(mailroom.QueryValidationException("mismatched input at ", "syntax"))
-
- # bogus query
- response = self.client.get(search_url + '?search=name="notclosed')
- results = response.json()
- self.assertEqual("Invalid query syntax.", results["error"])
- self.assertEqual(0, results["total"])
-
- # if we query a field, it should show up in our field dict
- age = self.create_field("age", "Age", ContactField.TYPE_NUMBER)
-
- mr_mocks.contact_search("age>32", cleaned='age > 32', contacts=[self.frank], fields=[age])
-
- response = self.client.get(search_url + "?search=age>32")
- results = response.json()
- self.assertEqual("Age", results["fields"][str(age.uuid)]["label"])
-
- @mock_mailroom
- def test_update_status(self, mr_mocks):
- self.login(self.admin)
-
- self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status)
-
- for status, _ in Contact.STATUS_CHOICES:
- self.client.post(reverse("contacts.contact_update", 
args=[self.joe.id]), {"status": status}) - - self.joe.refresh_from_db() - self.assertEqual(status, self.joe.status) - - def test_update(self): - # if new values don't differ from current values.. no modifications - self.assertEqual([], self.joe.update(name="Joe Blow", language="")) - - # change language - self.assertEqual([modifiers.Language(language="eng")], self.joe.update(name="Joe Blow", language="eng")) - - self.joe.language = "eng" - self.joe.save(update_fields=("language",)) - - # change name - self.assertEqual([modifiers.Name(name="Joseph Blow")], self.joe.update(name="Joseph Blow", language="eng")) - - # change both name and language - self.assertEqual( - [modifiers.Name(name="Joseph Blower"), modifiers.Language(language="spa")], - self.joe.update(name="Joseph Blower", language="spa"), - ) - - @mock_mailroom - def test_update_static_groups(self, mr_mocks): - # create some static groups - spammers = self.create_group("Spammers", []) - testers = self.create_group("Testers", []) - customers = self.create_group("Customers", []) - - self.assertEqual(set(spammers.contacts.all()), set()) - self.assertEqual(set(testers.contacts.all()), set()) - self.assertEqual(set(customers.contacts.all()), set()) - - # add to 2 static groups - mods = self.joe.update_static_groups([spammers, testers]) - self.assertEqual( - [ - modifiers.Groups( - modification="add", - groups=[ - modifiers.GroupRef(uuid=spammers.uuid, name="Spammers"), - modifiers.GroupRef(uuid=testers.uuid, name="Testers"), - ], - ), - ], - mods, - ) - - self.joe.modify(self.admin, mods) - - # remove from one and add to another - mods = self.joe.update_static_groups([testers, customers]) - - self.assertEqual( - [ - modifiers.Groups( - modification="remove", groups=[modifiers.GroupRef(uuid=spammers.uuid, name="Spammers")] - ), - modifiers.Groups( - modification="add", groups=[modifiers.GroupRef(uuid=customers.uuid, name="Customers")] - ), - ], - mods, - ) - - @mock_mailroom - def test_bulk_modify_with_no_contacts(self, mr_mocks): - Contact.bulk_modify(self.admin, [], [modifiers.Language(language="spa")]) - - # just a NOOP - self.assertEqual([], mr_mocks.calls["contact_modify"]) - - @mock_mailroom - def test_contact_model(self, mr_mocks): - contact = self.create_contact(name="Boy", phone="12345") - self.assertEqual(contact.get_display(), "Boy") - - contact3 = self.create_contact(name=None, phone="0788111222") - self.channel.country = "RW" - self.channel.save() - - normalized = contact3.get_urn(URN.TEL_SCHEME).ensure_number_normalization(self.channel) - self.assertEqual(normalized.path, "+250788111222") - - contact4 = self.create_contact(name=None, phone="0788333444") - normalized = contact4.get_urn(URN.TEL_SCHEME).ensure_number_normalization(self.channel) - self.assertEqual(normalized.path, "+250788333444") - - contact5 = self.create_contact(name="Jimmy", phone="+250788333555") - mods = contact5.update_urns(["twitter:jimmy_woot", "tel:0788333666"]) - contact5.modify(self.user, mods) - - # check old phone URN still existing but was detached - self.assertIsNone(ContactURN.objects.get(identity="tel:+250788333555").contact) - - # check new URNs were created and attached - self.assertEqual(contact5, ContactURN.objects.get(identity="tel:+250788333666").contact) - self.assertEqual(contact5, ContactURN.objects.get(identity="twitter:jimmy_woot").contact) - - # check twitter URN takes priority if you don't specify scheme - self.assertEqual("twitter:jimmy_woot", str(contact5.get_urn())) - self.assertEqual("twitter:jimmy_woot", 
str(contact5.get_urn(schemes=[URN.TWITTER_SCHEME]))) - self.assertEqual("tel:+250788333666", str(contact5.get_urn(schemes=[URN.TEL_SCHEME]))) - self.assertIsNone(contact5.get_urn(schemes=["email"])) - self.assertIsNone(contact5.get_urn(schemes=["facebook"])) - - def test_field_json(self): - self.setUpLocations() - - # simple text field - self.set_contact_field(self.joe, "dog", "Chef") - self.joe.refresh_from_db() - dog_uuid = str(ContactField.user_fields.get(key="dog").uuid) - - self.assertEqual(self.joe.fields, {dog_uuid: {"text": "Chef"}}) - - self.set_contact_field(self.joe, "dog", "") - self.joe.refresh_from_db() - self.assertEqual(self.joe.fields, {}) - - # numeric field value - self.set_contact_field(self.joe, "dog", "23.00") - self.joe.refresh_from_db() - self.assertEqual(self.joe.fields, {dog_uuid: {"text": "23.00", "number": 23}}) - - # numeric field value - self.set_contact_field(self.joe, "dog", "37.27903") - self.joe.refresh_from_db() - self.assertEqual(self.joe.fields, {dog_uuid: {"text": "37.27903", "number": Decimal("37.27903")}}) - - # numeric field values that could be NaN, we don't support that - self.set_contact_field(self.joe, "dog", "NaN") - self.joe.refresh_from_db() - self.assertEqual(self.joe.fields, {dog_uuid: {"text": "NaN"}}) - - # datetime instead - self.set_contact_field(self.joe, "dog", "2018-03-05T02:31:00.000Z") - self.joe.refresh_from_db() - self.assertEqual( - self.joe.fields, {dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}} - ) - - # setting another field doesn't ruin anything - self.set_contact_field(self.joe, "cat", "Rando") - self.joe.refresh_from_db() - cat_uuid = str(ContactField.user_fields.get(key="cat").uuid) - self.assertEqual( - self.joe.fields, - { - dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}, - cat_uuid: {"text": "Rando"}, - }, - ) - - # setting a fully qualified path parses to that level, regardless of field type - self.set_contact_field(self.joe, "cat", "Rwanda > Kigali City") - self.joe.refresh_from_db() - self.assertEqual( - self.joe.fields, - { - dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}, - cat_uuid: {"text": "Rwanda > Kigali City", "state": "Rwanda > Kigali City"}, - }, - ) - - # clear our previous fields - self.set_contact_field(self.joe, "dog", "") - self.assertEqual(self.joe.fields, {cat_uuid: {"text": "Rwanda > Kigali City", "state": "Rwanda > Kigali City"}}) - self.joe.refresh_from_db() - - self.set_contact_field(self.joe, "cat", "") - self.joe.refresh_from_db() - - # change a field to an invalid field value type - self.set_contact_field(self.joe, "cat", "xx") - ContactField.user_fields.filter(key="cat").update(value_type="Z") - bad_field = ContactField.user_fields.get(key="cat") - - with self.assertRaises(KeyError): - self.joe.get_field_serialized(bad_field) - - with self.assertRaises(KeyError): - self.joe.get_field_value(bad_field) - - def test_field_values(self): - self.setUpLocations() - - registration_field = self.create_field( - "registration_date", "Registration Date", value_type=ContactField.TYPE_DATETIME - ) - weight_field = self.create_field("weight", "Weight", value_type=ContactField.TYPE_NUMBER) - color_field = self.create_field("color", "Color", value_type=ContactField.TYPE_TEXT) - state_field = self.create_field("state", "State", value_type=ContactField.TYPE_STATE) - - # none value instances - self.assertEqual(self.joe.get_field_serialized(weight_field), None) - 
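
For context on the field JSON assertions above: a contact's field values are stored as a JSON object keyed by field UUID, where the raw text is always kept and typed keys ("number", "datetime", "state", ...) are added only when the text parses. A minimal sketch of the number-parsing rule those assertions exercise, using a hypothetical parse_field_value helper rather than the app's actual implementation:

    from decimal import Decimal, InvalidOperation

    def parse_field_value(text: str) -> dict:
        # keep the raw text; add a "number" key only when the text parses
        # to a finite decimal (so "NaN" stays text-only, as asserted above)
        value = {"text": text}
        try:
            number = Decimal(text)
            if number.is_finite():
                value["number"] = number
        except InvalidOperation:
            pass
        return value

    assert parse_field_value("37.27903") == {"text": "37.27903", "number": Decimal("37.27903")}
    assert parse_field_value("NaN") == {"text": "NaN"}
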
self.assertEqual(self.joe.get_field_display(weight_field), "") - self.assertEqual(self.joe.get_field_serialized(registration_field), None) - self.assertEqual(self.joe.get_field_display(registration_field), "") - - self.set_contact_field(self.joe, "registration_date", "2014-12-31T01:04:00Z") - self.set_contact_field(self.joe, "weight", "75.888888") - self.set_contact_field(self.joe, "color", "green") - self.set_contact_field(self.joe, "state", "kigali city") - - self.assertEqual(self.joe.get_field_serialized(registration_field), "2014-12-31T03:04:00+02:00") - - self.assertEqual(self.joe.get_field_serialized(weight_field), "75.888888") - self.assertEqual(self.joe.get_field_display(weight_field), "75.888888") - - self.set_contact_field(self.joe, "weight", "0") - self.assertEqual(self.joe.get_field_serialized(weight_field), "0") - self.assertEqual(self.joe.get_field_display(weight_field), "0") - - # passing something non-numeric to a decimal field - self.set_contact_field(self.joe, "weight", "xxx") - self.assertEqual(self.joe.get_field_serialized(weight_field), None) - self.assertEqual(self.joe.get_field_display(weight_field), "") - - self.assertEqual(self.joe.get_field_serialized(state_field), "Rwanda > Kigali City") - self.assertEqual(self.joe.get_field_display(state_field), "Kigali City") - - self.assertEqual(self.joe.get_field_serialized(color_field), "green") - self.assertEqual(self.joe.get_field_display(color_field), "green") - - # can fetch proxy fields too - created_on = self.org.fields.get(key="created_on") - last_seen_on = self.org.fields.get(key="last_seen_on") - - self.assertEqual(self.joe.get_field_display(created_on), self.org.format_datetime(self.joe.created_on)) - self.assertEqual(self.joe.get_field_display(last_seen_on), "") - - def test_set_location_fields(self): - self.setUpLocations() - - district_field = self.create_field("district", "District", value_type=ContactField.TYPE_DISTRICT) - not_state_field = self.create_field("not_state", "Not State", value_type=ContactField.TYPE_TEXT) - - # add duplicate district in different states - east_province = AdminBoundary.create(osm_id="R005", name="East Province", level=1, parent=self.country) - AdminBoundary.create(osm_id="R004", name="Remera", level=2, parent=east_province) - kigali = AdminBoundary.objects.get(name="Kigali City") - AdminBoundary.create(osm_id="R003", name="Remera", level=2, parent=kigali) - - joe = Contact.objects.get(pk=self.joe.pk) - self.set_contact_field(joe, "district", "Remera") - - # empty because it is ambiguous - self.assertFalse(joe.get_field_value(district_field)) - - state_field = self.create_field("state", "State", value_type=ContactField.TYPE_STATE) - - self.set_contact_field(joe, "state", "Kigali city") - self.assertEqual("Kigali City", joe.get_field_display(state_field)) - self.assertEqual("Rwanda > Kigali City", joe.get_field_serialized(state_field)) - - # test that we don't normalize non-location fields - self.set_contact_field(joe, "not_state", "kigali city") - self.assertEqual("kigali city", joe.get_field_display(not_state_field)) - self.assertEqual("kigali city", joe.get_field_serialized(not_state_field)) - - self.set_contact_field(joe, "district", "Remera") - self.assertEqual("Remera", joe.get_field_display(district_field)) - self.assertEqual("Rwanda > Kigali City > Remera", joe.get_field_serialized(district_field)) - - def test_set_location_ward_fields(self): - self.setUpLocations() - - state = AdminBoundary.create(osm_id="3710302", name="Kano", level=1, parent=self.country) - district = 
AdminBoundary.create(osm_id="3710307", name="Bichi", level=2, parent=state) - AdminBoundary.create(osm_id="3710377", name="Bichi", level=3, parent=district) - - self.create_field("state", "State", value_type=ContactField.TYPE_STATE) - self.create_field("district", "District", value_type=ContactField.TYPE_DISTRICT) - ward = self.create_field("ward", "Ward", value_type=ContactField.TYPE_WARD) - - jemila = self.create_contact( - name="Jemila Alley", - urns=["tel:123", "twitter:fulani_p"], - fields={"state": "kano", "district": "bichi", "ward": "bichi"}, - ) - self.assertEqual(jemila.get_field_serialized(ward), "Rwanda > Kano > Bichi > Bichi") - - -class ContactURNTest(TembaTest): - def setUp(self): - super().setUp() - - def test_get_display(self): - urn = ContactURN.objects.create( - org=self.org, scheme="tel", path="+250788383383", identity="tel:+250788383383", priority=50 - ) - self.assertEqual(urn.get_display(self.org), "0788 383 383") - self.assertEqual(urn.get_display(self.org, formatted=False), "+250788383383") - self.assertEqual(urn.get_display(self.org, international=True), "+250 788 383 383") - self.assertEqual(urn.get_display(self.org, formatted=False, international=True), "+250788383383") - - # friendly tel formatting for whatsapp too - urn = ContactURN.objects.create( - org=self.org, scheme="whatsapp", path="12065551212", identity="whatsapp:12065551212", priority=50 - ) - self.assertEqual(urn.get_display(self.org), "(206) 555-1212") - - # use path for other schemes - urn = ContactURN.objects.create( - org=self.org, scheme="twitter", path="billy_bob", identity="twitter:billy_bob", priority=50 - ) - self.assertEqual(urn.get_display(self.org), "billy_bob") - - # unless there's a display property - urn = ContactURN.objects.create( - org=self.org, - scheme="twitter", - path="jimmy_john", - identity="twitter:jimmy_john", - priority=50, - display="JIM", - ) - self.assertEqual(urn.get_display(self.org), "JIM") - - def test_empty_scheme_disallowed(self): - with self.assertRaises(IntegrityError): - ContactURN.objects.create(org=self.org, scheme="", path="1234", identity=":1234") - - def test_empty_path_disallowed(self): - with self.assertRaises(IntegrityError): - ContactURN.objects.create(org=self.org, scheme="ext", path="", identity="ext:") - - def test_identity_mismatch_disallowed(self): - with self.assertRaises(IntegrityError): - ContactURN.objects.create(org=self.org, scheme="ext", path="1234", identity="ext:5678") - - def test_ensure_normalization(self): - contact1 = self.create_contact("Bob", urns=["tel:+250788111111"]) - contact2 = self.create_contact("Jim", urns=["tel:+0788222222"]) - - self.org.normalize_contact_tels() - - self.assertEqual("+250788111111", contact1.urns.get().path) - self.assertEqual("+250788222222", contact2.urns.get().path) - - -class ContactFieldTest(TembaTest): - def setUp(self): - super().setUp() - - self.joe = self.create_contact(name="Joe Blow", phone="123") - self.frank = self.create_contact(name="Frank Smith", phone="1234") - - self.contactfield_1 = self.create_field("first", "First", priority=10) - self.contactfield_2 = self.create_field("second", "Second") - self.contactfield_3 = self.create_field("third", "Third", priority=20) - - self.other_org_field = self.create_field("other", "Other", priority=10, org=self.org2) - - def test_get_or_create(self): - # name can be generated - field1 = ContactField.get_or_create(self.org, self.admin, "join_date") - self.assertEqual("join_date", field1.key) - self.assertEqual("Join Date", field1.name) - 
self.assertEqual(ContactField.TYPE_TEXT, field1.value_type)
- self.assertFalse(field1.is_system)
-
- # or passed explicitly along with type
- field2 = ContactField.get_or_create(
- self.org, self.admin, "another", name="My Label", value_type=ContactField.TYPE_NUMBER
- )
- self.assertEqual("another", field2.key)
- self.assertEqual("My Label", field2.name)
- self.assertEqual(ContactField.TYPE_NUMBER, field2.value_type)
-
- # if there's an existing field with this key we get that with name and type updated
- field3 = ContactField.get_or_create(
- self.org, self.admin, "another", name="Updated Label", value_type=ContactField.TYPE_DATETIME
- )
- self.assertEqual(field2, field3)
- self.assertEqual("another", field3.key)
- self.assertEqual("Updated Label", field3.name)
- self.assertEqual(ContactField.TYPE_DATETIME, field3.value_type)
-
- field4 = ContactField.get_or_create(self.org, self.admin, "another", name="Updated Again Label")
- self.assertEqual(field3, field4)
- self.assertEqual("another", field4.key)
- self.assertEqual("Updated Again Label", field4.name)
- self.assertEqual(ContactField.TYPE_DATETIME, field4.value_type) # unchanged
-
- # can't create with an invalid key
- for key in ContactField.RESERVED_KEYS:
- with self.assertRaises(ValueError):
- ContactField.get_or_create(self.org, self.admin, key, key, value_type=ContactField.TYPE_TEXT)
-
- # provided names are made unique
- field5 = ContactField.get_or_create(self.org, self.admin, "date_joined", name="join date")
- self.assertEqual("date_joined", field5.key)
- self.assertEqual("join date 2", field5.name)
-
- # and ignored if not valid
- field6 = ContactField.get_or_create(self.org, self.admin, "date_joined", name=" ")
- self.assertEqual(field5, field6)
- self.assertEqual("date_joined", field6.key)
- self.assertEqual("join date 2", field6.name) # unchanged
-
- # same for creating a new field
- field7 = ContactField.get_or_create(self.org, self.admin, "new_key", name=" ")
- self.assertEqual("new_key", field7.key)
- self.assertEqual("New Key", field7.name) # generated
-
- def test_contact_templatetag(self):
- ContactField.get_or_create(
- self.org, self.admin, "date_joined", name="join date", value_type=ContactField.TYPE_DATETIME
- )
-
- self.set_contact_field(self.joe, "first", "Starter")
- self.set_contact_field(self.joe, "date_joined", "01-01-2022 8:30")
-
- self.assertEqual(contact_field(self.joe, "first"), "Starter")
- self.assertEqual(
- contact_field(self.joe, "date_joined"),
- "",
- )
- self.assertEqual(contact_field(self.joe, "not_there"), "--")
-
- def test_make_key(self):
- self.assertEqual("first_name", ContactField.make_key("First Name"))
- self.assertEqual("second_name", ContactField.make_key("Second Name "))
- self.assertEqual("caf", ContactField.make_key("café"))
- self.assertEqual(
- "323_ffsn_slfs_ksflskfs_fk_anfaddgas",
- ContactField.make_key(" ^%$# %$$ $##323 ffsn slfs ksflskfs!!!! 
fk$%%%$$$anfaDDGAS ))))))))) "), - ) - - def test_is_valid_key(self): - self.assertTrue(ContactField.is_valid_key("age")) - self.assertTrue(ContactField.is_valid_key("age_now_2")) - self.assertTrue(ContactField.is_valid_key("email")) - self.assertFalse(ContactField.is_valid_key("Age")) # must be lowercase - self.assertFalse(ContactField.is_valid_key("age!")) # can't have punctuation - self.assertFalse(ContactField.is_valid_key("âge")) # a-z only - self.assertFalse(ContactField.is_valid_key("2up")) # can't start with a number - self.assertFalse(ContactField.is_valid_key("has")) # can't be reserved key - self.assertFalse(ContactField.is_valid_key("is")) - self.assertFalse(ContactField.is_valid_key("fields")) - self.assertFalse(ContactField.is_valid_key("urns")) - self.assertFalse(ContactField.is_valid_key("a" * 37)) # too long - - def test_is_valid_name(self): - self.assertTrue(ContactField.is_valid_name("Age")) - self.assertTrue(ContactField.is_valid_name("Age Now 2")) - self.assertFalse(ContactField.is_valid_name("Age_Now")) # can't have punctuation - self.assertFalse(ContactField.is_valid_name("âge")) # a-z only - - @mock_mailroom - def test_contact_field_list_sort_fields(self, mr_mocks): - url = reverse("contacts.contact_list") - self.login(self.admin) - - mr_mocks.contact_search("", contacts=[self.joe]) - mr_mocks.contact_search("Joe", contacts=[self.joe]) - - response = self.client.get("%s?sort_on=%s" % (url, str(self.contactfield_1.key))) - - self.assertEqual(response.context["sort_field"], str(self.contactfield_1.key)) - self.assertEqual(response.context["sort_direction"], "asc") - self.assertNotIn("search", response.context) - - response = self.client.get("%s?sort_on=-%s" % (url, str(self.contactfield_1.key))) - - self.assertEqual(response.context["sort_field"], str(self.contactfield_1.key)) - self.assertEqual(response.context["sort_direction"], "desc") - self.assertNotIn("search", response.context) - - response = self.client.get("%s?sort_on=%s" % (url, "created_on")) - - self.assertEqual(response.context["sort_field"], "created_on") - self.assertEqual(response.context["sort_direction"], "asc") - self.assertNotIn("search", response.context) - - response = self.client.get("%s?sort_on=-%s&search=Joe" % (url, "created_on")) - - self.assertEqual(response.context["sort_field"], "created_on") - self.assertEqual(response.context["sort_direction"], "desc") - self.assertIn("search", response.context) - - def test_view_updatepriority_valid(self): - org_fields = ContactField.user_fields.filter(org=self.org, is_active=True) - - self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) - - self.login(self.admin) - updatepriority_cf_url = reverse("contacts.contactfield_update_priority") - - # there should be no updates because CFs with ids do not exist - post_data = json.dumps({123_123: 1000, 123_124: 999, 123_125: 998}) - - response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()["status"], "OK") - - self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) - - # build valid post data - post_data = json.dumps({cf.key: index for index, cf in enumerate(org_fields.order_by("id"))}) - - # try to update as admin2 - self.login(self.admin2) - response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") - - # nothing changed - self.assertListEqual([10, 0, 20], [cf.priority for cf in 
org_fields.order_by("id")]) - - # then as real admin - self.login(self.admin) - response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()["status"], "OK") - - self.assertListEqual([0, 1, 2], [cf.priority for cf in org_fields.order_by("id")]) - - def test_view_updatepriority_invalid(self): - org_fields = ContactField.user_fields.filter(org=self.org, is_active=True) - - self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) - - self.login(self.admin) - updatepriority_cf_url = reverse("contacts.contactfield_update_priority") - - post_data = '{invalid_json": 123}' - - response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") - self.assertEqual(response.status_code, 400) - response_json = response.json() - self.assertEqual(response_json["status"], "ERROR") - self.assertEqual( - response_json["err_detail"], "Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" - ) - - -class ContactFieldCRUDLTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.age = self.create_field("age", "Age", value_type="N", show_in_table=True) - self.gender = self.create_field("gender", "Gender", value_type="T") - self.state = self.create_field("state", "State", value_type="S") - - self.deleted = self.create_field("foo", "Foo") - self.deleted.is_active = False - self.deleted.save(update_fields=("is_active",)) - - self.other_org_field = self.create_field("other", "Other", org=self.org2) - - def test_create(self): - create_url = reverse("contacts.contactfield_create") - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - - # for a deploy that doesn't have locations feature, don't show location field types - with override_settings(FEATURES={}): - response = self.assertCreateFetch( - create_url, - [self.editor, self.admin], - form_fields=["name", "value_type", "show_in_table", "agent_access"], - ) - self.assertEqual( - [("T", "Text"), ("N", "Number"), ("D", "Date & Time")], - response.context["form"].fields["value_type"].choices, - ) - - response = self.assertCreateFetch( - create_url, - [self.editor, self.admin], - form_fields=["name", "value_type", "show_in_table", "agent_access"], - ) - self.assertEqual( - [("T", "Text"), ("N", "Number"), ("D", "Date & Time"), ("S", "State"), ("I", "District"), ("W", "Ward")], - response.context["form"].fields["value_type"].choices, - ) - - # try to submit with empty name - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "", "value_type": "T", "show_in_table": True, "agent_access": "E"}, - form_errors={"name": "This field is required."}, - ) - - # try to submit with invalid name - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "???", "value_type": "T", "show_in_table": True, "agent_access": "E"}, - form_errors={"name": "Can only contain letters, numbers and hypens."}, - ) - - # try to submit with something that would be an invalid key - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "HAS", "value_type": "T", "show_in_table": True, "agent_access": "E"}, - form_errors={"name": "Can't be a reserved word."}, - ) - - # try to submit with name of existing field - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "AGE", "value_type": "N", "show_in_table": True, "agent_access": "E"}, - form_errors={"name": "Must be unique."}, - ) - - # submit with valid data - 
self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Goats", "value_type": "N", "show_in_table": True, "agent_access": "E"}, - new_obj_query=ContactField.user_fields.filter( - org=self.org, name="Goats", value_type="N", show_in_table=True, agent_access="E" - ), - success_status=200, - ) - - # it's also ok to create a field with the same name as a deleted field - ContactField.user_fields.get(key="age").release(self.admin) - - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "N"}, - new_obj_query=ContactField.user_fields.filter( - org=self.org, name="Age", value_type="N", show_in_table=True, agent_access="N", is_active=True - ), - success_status=200, - ) - - # simulate an org which has reached the limit for fields - with override_settings(ORG_LIMIT_DEFAULTS={"fields": 2}): - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Sheep", "value_type": "T", "show_in_table": True, "agent_access": "E"}, - form_errors={ - "__all__": "This workspace has reached its limit of 2 fields. You must delete existing ones before you can create new ones." - }, - ) - - def test_update(self): - update_url = reverse("contacts.contactfield_update", args=[self.age.key]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - - # for a deploy that doesn't have locations feature, don't show location field types - with override_settings(FEATURES={}): - response = self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - ) - self.assertEqual(3, len(response.context["form"].fields["value_type"].choices)) - - response = self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - ) - self.assertEqual(6, len(response.context["form"].fields["value_type"].choices)) - - # try submit without change - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - success_status=200, - ) - - # try to submit with empty name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - form_errors={"name": "This field is required."}, - object_unchanged=self.age, - ) - - # try to submit with invalid name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "???", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - form_errors={"name": "Can only contain letters, numbers and hypens."}, - object_unchanged=self.age, - ) - - # try to submit with a name that is used by another field - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "GENDER", "value_type": "N", "show_in_table": True, "agent_access": "V"}, - form_errors={"name": "Must be unique."}, - object_unchanged=self.age, - ) - - # submit with different name, type and agent access - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Age In Years", "value_type": "T", "show_in_table": False, "agent_access": "E"}, - success_status=200, - ) - - self.age.refresh_from_db() - self.assertEqual("Age In Years", self.age.name) - self.assertEqual("T", self.age.value_type) - self.assertFalse(self.age.show_in_table) - self.assertEqual("E", self.age.agent_access) - - # simulate an org which has reached the limit for fields - should still be able to update a 
field - with override_settings(ORG_LIMIT_DEFAULTS={"fields": 2}): - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Age 2", "value_type": "T", "show_in_table": True, "agent_access": "E"}, - success_status=200, - ) - - self.age.refresh_from_db() - self.assertEqual("Age 2", self.age.name) - - # create a date field used in a campaign event - registered = self.create_field("registered", "Registered", value_type="D") - campaign = Campaign.create(self.org, self.admin, "Reminders", self.create_group("Farmers")) - CampaignEvent.create_flow_event( - self.org, self.admin, campaign, registered, offset=1, unit="W", flow=self.create_flow("Test") - ) - - update_url = reverse("contacts.contactfield_update", args=[registered.key]) - - self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={"name": "Registered", "value_type": "D", "show_in_table": False, "agent_access": "V"}, - ) - - # try to submit with different type - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Registered", "value_type": "T", "show_in_table": False, "agent_access": "V"}, - form_errors={"value_type": "Can't change type of date field being used by campaign events."}, - object_unchanged=registered, - ) - - # submit with only a different name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Registered On", "value_type": "D", "show_in_table": False, "agent_access": "V"}, - success_status=200, - ) - - registered.refresh_from_db() - self.assertEqual("Registered On", registered.name) - self.assertEqual("D", registered.value_type) - self.assertFalse(registered.show_in_table) - - def test_list(self): - list_url = reverse("contacts.contactfield_list") - - self.assertRequestDisallowed(list_url, [None, self.agent]) - self.assertListFetch( - list_url, [self.user, self.editor, self.admin], context_objects=[self.age, self.gender, self.state] - ) - self.assertContentMenu(list_url, self.user, []) - self.assertContentMenu(list_url, self.admin, ["New Field"]) - - def test_create_warnings(self): - self.login(self.admin) - create_url = reverse("contacts.contactfield_create") - response = self.client.get(create_url) - - self.assertEqual(3, response.context["total_count"]) - self.assertEqual(250, response.context["total_limit"]) - self.assertNotContains(response, "You have reached the limit") - self.assertNotContains(response, "You are approaching the limit") - - with override_settings(ORG_LIMIT_DEFAULTS={"fields": 10}): - response = self.requestView(create_url, self.admin) - - self.assertContains(response, "You are approaching the limit") - - with override_settings(ORG_LIMIT_DEFAULTS={"fields": 3}): - response = self.requestView(create_url, self.admin) - - self.assertContains(response, "You have reached the limit") - - @mock_mailroom - def test_usages(self, mr_mocks): - flow = self.get_flow("dependencies", name="Dependencies") - field = ContactField.user_fields.filter(is_active=True, org=self.org, key="favorite_cat").get() - field.value_type = ContactField.TYPE_DATETIME - field.save(update_fields=("value_type",)) - - group = self.create_group("Farmers", query='favorite_cat != ""') - campaign = Campaign.create(self.org, self.admin, "Planting Reminders", group) - - # create flow events - event1 = CampaignEvent.create_flow_event( - self.org, - self.admin, - campaign, - relative_to=field, - offset=0, - unit="D", - flow=flow, - delivery_hour=17, - ) - inactive_campaignevent = CampaignEvent.create_flow_event( - self.org, - self.admin, - campaign, - relative_to=field, - offset=0, - 
unit="D", - flow=flow, - delivery_hour=20, - ) - inactive_campaignevent.is_active = False - inactive_campaignevent.save(update_fields=("is_active",)) - - usages_url = reverse("contacts.contactfield_usages", args=[field.key]) - - self.assertRequestDisallowed(usages_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(usages_url, [self.user, self.editor, self.admin], context_object=field) - - self.assertEqual( - {"flow": [flow], "group": [group], "campaign_event": [event1]}, - {t: list(qs) for t, qs in response.context["dependents"].items()}, - ) - - def test_delete(self): - # create new field 'Joined On' which is used by a campaign event (soft) and a flow (soft) - group = self.create_group("Amazing Group", contacts=[]) - joined_on = self.create_field("joined_on", "Joined On", value_type=ContactField.TYPE_DATETIME) - campaign = Campaign.create(self.org, self.admin, Campaign.get_unique_name(self.org, "Reminders"), group) - flow = self.create_flow("Amazing Flow") - flow.field_dependencies.add(joined_on) - campaign_event = CampaignEvent.create_flow_event( - self.org, self.admin, campaign, joined_on, offset=1, unit="W", flow=flow, delivery_hour=13 - ) - - # make 'Age' appear to be used by a flow (soft) and a group (hard) - flow.field_dependencies.add(self.age) - group.query_fields.add(self.age) - - delete_gender_url = reverse("contacts.contactfield_delete", args=[self.gender.key]) - delete_joined_url = reverse("contacts.contactfield_delete", args=[joined_on.key]) - delete_age_url = reverse("contacts.contactfield_delete", args=[self.age.key]) - - self.assertRequestDisallowed(delete_gender_url, [None, self.user, self.agent, self.admin2]) - - # a field with no dependents can be deleted - response = self.assertDeleteFetch(delete_gender_url, [self.editor, self.admin]) - self.assertEqual({}, response.context["soft_dependents"]) - self.assertEqual({}, response.context["hard_dependents"]) - self.assertContains(response, "You are about to delete") - self.assertContains(response, "There is no way to undo this. Are you sure?") - - self.assertDeleteSubmit(delete_gender_url, self.admin, object_deactivated=self.gender, success_status=200) - - # create the same field again - self.gender = self.create_field("gender", "Gender", value_type="T") - - # since fields are queried by key name, try and delete it again - # to make sure we aren't deleting the previous deleted field again - self.assertDeleteSubmit(delete_gender_url, self.admin, object_deactivated=self.gender, success_status=200) - self.gender.refresh_from_db() - self.assertFalse(self.gender.is_active) - - # a field with only soft dependents can also be deleted but we give warnings - response = self.assertDeleteFetch(delete_joined_url, [self.admin]) - self.assertEqual({"flow", "campaign_event"}, set(response.context["soft_dependents"].keys())) - self.assertEqual({}, response.context["hard_dependents"]) - self.assertContains(response, "is used by the following items but can still be deleted:") - self.assertContains(response, "Amazing Flow") - self.assertContains(response, "There is no way to undo this. 
Are you sure?") - - self.assertDeleteSubmit(delete_joined_url, self.admin, object_deactivated=joined_on, success_status=200) - - # check that flow is now marked as having issues - flow.refresh_from_db() - self.assertTrue(flow.has_issues) - self.assertNotIn(joined_on, flow.field_dependencies.all()) - - # and that the campaign event is gone - campaign_event.refresh_from_db() - self.assertFalse(campaign_event.is_active) - - # a field with hard dependents can't be deleted - response = self.assertDeleteFetch(delete_age_url, [self.admin]) - self.assertEqual({"flow"}, set(response.context["soft_dependents"].keys())) - self.assertEqual({"group"}, set(response.context["hard_dependents"].keys())) - self.assertContains(response, "can't be deleted as it is still used by the following items:") - self.assertContains(response, "Amazing Group") - self.assertNotContains(response, "Delete") - - -class URNTest(TembaTest): - def test_facebook_urn(self): - self.assertTrue(URN.validate("facebook:ref:asdf")) - - def test_instagram_urn(self): - self.assertTrue(URN.validate("instagram:12345678901234567")) - - def test_discord_urn(self): - self.assertEqual("discord:750841288886321253", URN.from_discord("750841288886321253")) - self.assertTrue(URN.validate(URN.from_discord("750841288886321253"))) - self.assertFalse(URN.validate(URN.from_discord("not-a-discord-id"))) - - def test_whatsapp_urn(self): - self.assertTrue(URN.validate("whatsapp:12065551212")) - self.assertFalse(URN.validate("whatsapp:+12065551212")) - - def test_freshchat_urn(self): - self.assertTrue( - URN.validate("freshchat:c0534f78-b6e9-4f79-8853-11cedfc1f35b/c0534f78-b6e9-4f79-8853-11cedfc1f35b") - ) - self.assertFalse(URN.validate("freshchat:+12065551212")) - - def test_from_parts(self): - self.assertEqual(URN.from_parts("deleted", "12345"), "deleted:12345") - self.assertEqual(URN.from_parts("tel", "12345"), "tel:12345") - self.assertEqual(URN.from_parts("tel", "+12345"), "tel:+12345") - self.assertEqual(URN.from_parts("tel", "(917) 992-5253"), "tel:(917) 992-5253") - self.assertEqual(URN.from_parts("mailto", "a_b+c@d.com"), "mailto:a_b+c@d.com") - self.assertEqual(URN.from_parts("twitterid", "2352362611", display="bobby"), "twitterid:2352362611#bobby") - self.assertEqual( - URN.from_parts("twitterid", "2352362611", query="foo=ba?r", display="bobby"), - "twitterid:2352362611?foo=ba%3Fr#bobby", - ) - - self.assertEqual(URN.from_tel("+12345"), "tel:+12345") - - self.assertRaises(ValueError, URN.from_parts, "", "12345") - self.assertRaises(ValueError, URN.from_parts, "tel", "") - self.assertRaises(ValueError, URN.from_parts, "xxx", "12345") - - def test_to_parts(self): - self.assertEqual(URN.to_parts("deleted:12345"), ("deleted", "12345", None, None)) - self.assertEqual(URN.to_parts("tel:12345"), ("tel", "12345", None, None)) - self.assertEqual(URN.to_parts("tel:+12345"), ("tel", "+12345", None, None)) - self.assertEqual(URN.to_parts("twitter:abc_123"), ("twitter", "abc_123", None, None)) - self.assertEqual(URN.to_parts("mailto:a_b+c@d.com"), ("mailto", "a_b+c@d.com", None, None)) - self.assertEqual(URN.to_parts("facebook:12345"), ("facebook", "12345", None, None)) - self.assertEqual(URN.to_parts("vk:12345"), ("vk", "12345", None, None)) - self.assertEqual(URN.to_parts("telegram:12345"), ("telegram", "12345", None, None)) - self.assertEqual(URN.to_parts("telegram:12345#foobar"), ("telegram", "12345", None, "foobar")) - self.assertEqual(URN.to_parts("ext:Aa0()+,-.:=@;$_!*'"), ("ext", "Aa0()+,-.:=@;$_!*'", None, None)) - 
self.assertEqual(URN.to_parts("instagram:12345"), ("instagram", "12345", None, None)) - - self.assertRaises(ValueError, URN.to_parts, "tel") - self.assertRaises(ValueError, URN.to_parts, "tel:") # missing scheme - self.assertRaises(ValueError, URN.to_parts, ":12345") # missing path - self.assertRaises(ValueError, URN.to_parts, "x_y:123") # invalid scheme - self.assertRaises(ValueError, URN.to_parts, "xyz:{abc}") # invalid path - - def test_normalize(self): - # valid tel numbers - self.assertEqual(URN.normalize("tel:0788383383", "RW"), "tel:+250788383383") - self.assertEqual(URN.normalize("tel: +250788383383 ", "KE"), "tel:+250788383383") - self.assertEqual(URN.normalize("tel:+250788383383", None), "tel:+250788383383") - self.assertEqual(URN.normalize("tel:250788383383", None), "tel:+250788383383") - self.assertEqual(URN.normalize("tel:2.50788383383E+11", None), "tel:+250788383383") - self.assertEqual(URN.normalize("tel:2.50788383383E+12", None), "tel:+250788383383") - self.assertEqual(URN.normalize("tel:(917)992-5253", "US"), "tel:+19179925253") - self.assertEqual(URN.normalize("tel:19179925253", None), "tel:+19179925253") - self.assertEqual(URN.normalize("tel:+62877747666", None), "tel:+62877747666") - self.assertEqual(URN.normalize("tel:62877747666", "ID"), "tel:+62877747666") - self.assertEqual(URN.normalize("tel:0877747666", "ID"), "tel:+62877747666") - self.assertEqual(URN.normalize("tel:07531669965", "GB"), "tel:+447531669965") - self.assertEqual(URN.normalize("tel:22658125926", ""), "tel:+22658125926") - self.assertEqual(URN.normalize("tel:263780821000", "ZW"), "tel:+263780821000") - self.assertEqual(URN.normalize("tel:+2203693333", ""), "tel:+2203693333") - - # un-normalizable tel numbers - self.assertEqual(URN.normalize("tel:12345", "RW"), "tel:12345") - self.assertEqual(URN.normalize("tel:0788383383", None), "tel:0788383383") - self.assertEqual(URN.normalize("tel:0788383383", "ZZ"), "tel:0788383383") - self.assertEqual(URN.normalize("tel:MTN", "RW"), "tel:mtn") - - # twitter handles remove @ - self.assertEqual(URN.normalize("twitter: @jimmyJO"), "twitter:jimmyjo") - self.assertEqual(URN.normalize("twitterid:12345#@jimmyJO"), "twitterid:12345#jimmyjo") - - # email addresses - self.assertEqual(URN.normalize("mailto: nAme@domAIN.cOm "), "mailto:name@domain.com") - - # external ids are case sensitive - self.assertEqual(URN.normalize("ext: eXterNAL123 "), "ext:eXterNAL123") - - def test_validate(self): - self.assertFalse(URN.validate("xxxx", None)) # un-parseable URNs don't validate - - # valid tel numbers - self.assertTrue(URN.validate("tel:0788383383", "RW")) - self.assertTrue(URN.validate("tel:+250788383383", "KE")) - self.assertTrue(URN.validate("tel:+23761234567", "CM")) # old Cameroon format - self.assertTrue(URN.validate("tel:+237661234567", "CM")) # new Cameroon format - self.assertTrue(URN.validate("tel:+250788383383", None)) - - # invalid tel numbers - self.assertFalse(URN.validate("tel:0788383383", "ZZ")) # invalid country - self.assertFalse(URN.validate("tel:0788383383", None)) # no country - self.assertFalse(URN.validate("tel:MTN", "RW")) - self.assertFalse(URN.validate("tel:5912705", "US")) - - # twitter handles - self.assertTrue(URN.validate("twitter:jimmyjo")) - self.assertTrue(URN.validate("twitter:billy_bob")) - self.assertFalse(URN.validate("twitter:jimmyjo!@")) - self.assertFalse(URN.validate("twitter:billy bob")) - - # twitterid urns - self.assertTrue(URN.validate("twitterid:12345#jimmyjo")) - self.assertTrue(URN.validate("twitterid:12345#1234567")) - 
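# ----------------------------------------------------------------------------
# Editor's aside (illustrative, not part of the diff): the tel cases above are
# the kind of normalization the `phonenumbers` library provides. A rough
# sketch, assuming the country code is an ISO-3166 alpha-2 string or None --
# not the actual URN.normalize implementation:
#
#   import phonenumbers
#
#   def normalize_tel(path: str, country: str | None) -> str:
#       path = path.strip().lower()
#       try:
#           parsed = phonenumbers.parse(path, country)
#           if phonenumbers.is_possible_number(parsed):
#               return phonenumbers.format_number(parsed, phonenumbers.PhoneNumberFormat.E164)
#       except phonenumbers.NumberParseException:
#           pass
#       return path  # un-normalizable numbers are kept as entered
#
#   normalize_tel("0788383383", "RW")     # -> "+250788383383"
#   normalize_tel("(917)992-5253", "US")  # -> "+19179925253"
#   normalize_tel("MTN", "RW")            # -> "mtn"
# ----------------------------------------------------------------------------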
self.assertFalse(URN.validate("twitterid:jimmyjo#1234567")) - self.assertFalse(URN.validate("twitterid:123#a.!f")) - - # email addresses - self.assertTrue(URN.validate("mailto:abcd+label@x.y.z.com")) - self.assertFalse(URN.validate("mailto:@@@")) - - # viber urn - self.assertTrue(URN.validate("viber:dKPvqVrLerGrZw15qTuVBQ==")) - - # facebook, telegram, vk and instagram URN paths must be integers - self.assertTrue(URN.validate("telegram:12345678901234567")) - self.assertFalse(URN.validate("telegram:abcdef")) - self.assertTrue(URN.validate("facebook:12345678901234567")) - self.assertFalse(URN.validate("facebook:abcdef")) - self.assertTrue(URN.validate("vk:12345678901234567")) - self.assertTrue(URN.validate("instagram:12345678901234567")) - self.assertFalse(URN.validate("instagram:abcdef")) - - -class ContactImportTest(TembaTest): - def test_parse_errors(self): - # try to open an import that is completely empty - with self.assertRaisesRegex(ValidationError, "Import file appears to be empty."): - path = "media/test_imports/empty_all_rows.xlsx" # No header row present either - with open(path, "rb") as f: - ContactImport.try_to_parse(self.org, f, path) - - def try_to_parse(name): - path = f"media/test_imports/{name}" - with open(path, "rb") as f: - ContactImport.try_to_parse(self.org, f, path) - - # try to open an import that exceeds the record limit - with patch("temba.contacts.models.ContactImport.MAX_RECORDS", 2): - with self.assertRaisesRegex(ValidationError, r"Import files can contain a maximum of 2 records\."): - try_to_parse("simple.xlsx") - - bad_files = [ - ("empty.xlsx", "Import file doesn't contain any records."), - ("empty_header.xlsx", "Import file contains an empty header."), - ("duplicate_urn.xlsx", "Import file contains duplicated contact URN 'tel:+250788382382' on row 4."), - ( - "duplicate_uuid.xlsx", - "Import file contains duplicated contact UUID 'f519ca1f-8513-49ba-8896-22bf0420dec7' on row 4.", - ), - ("invalid_scheme.xlsx", "Header 'URN:XXX' is not a valid URN type."), - ("invalid_field_key.xlsx", "Header 'Field: #$^%' is not a valid field name."), - ("reserved_field_key.xlsx", "Header 'Field:HAS' is not a valid field name."), - ("no_urn_or_uuid.xlsx", "Import files must contain either UUID or a URN header."), - ("uuid_only.xlsx", "Import files must contain columns besides UUID."), - ] - - for imp_file, imp_error in bad_files: - with self.assertRaises(ValidationError, msg=f"expected error in {imp_file}") as e: - try_to_parse(imp_file) - self.assertEqual(imp_error, e.exception.messages[0], f"error mismatch for {imp_file}") - - def test_extract_mappings(self): - # try simple import in different formats - for ext in ("xlsx",): - imp = self.create_contact_import(f"media/test_imports/simple.{ext}") - self.assertEqual(3, imp.num_records) - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "name", "mapping": {"type": "attribute", "name": "name"}}, - ], - imp.mappings, - ) - - # try import with 2 URN types - imp = self.create_contact_import("media/test_imports/twitter_and_phone.xlsx") - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "name", "mapping": {"type": "attribute", "name": "name"}}, - {"header": "URN:Twitter", "mapping": {"type": "scheme", "scheme": "twitter"}}, - ], - imp.mappings, - ) - - # or with 3 URN columns - imp = self.create_contact_import("media/test_imports/multiple_tel_urns.xlsx") - self.assertEqual( - [ - {"header": "Name", "mapping": {"type": 
"attribute", "name": "name"}}, - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - ], - imp.mappings, - ) - - imp = self.create_contact_import("media/test_imports/missing_name_header.xlsx") - self.assertEqual([{"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}], imp.mappings) - - self.create_field("goats", "Num Goats", ContactField.TYPE_NUMBER) - - imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, - {"header": "language", "mapping": {"type": "attribute", "name": "language"}}, - {"header": "Status", "mapping": {"type": "attribute", "name": "status"}}, - {"header": "Created On", "mapping": {"type": "ignore"}}, - { - "header": "field: goats", - "mapping": {"type": "field", "key": "goats", "name": "Num Goats"}, # matched by key - }, - { - "header": "Field:Sheep", - "mapping": {"type": "new_field", "key": "sheep", "name": "Sheep", "value_type": "T"}, - }, - {"header": "Group:Testers", "mapping": {"type": "ignore"}}, - ], - imp.mappings, - ) - - # it's possible for field keys and labels to be out of sync, in which case we match by label first because - # that's how we export contacts - self.create_field("num_goats", "Goats", ContactField.TYPE_NUMBER) - - imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") - self.assertEqual( - { - "header": "field: goats", - "mapping": {"type": "field", "key": "num_goats", "name": "Goats"}, # matched by label - }, - imp.mappings[5], - ) - - # a header can be a number but it will be ignored - imp = self.create_contact_import("media/test_imports/numerical_header.xlsx") - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "Name", "mapping": {"name": "name", "type": "attribute"}}, - {"header": "123", "mapping": {"type": "ignore"}}, - ], - imp.mappings, - ) - - self.create_field("a_number", "A-Number", ContactField.TYPE_NUMBER) - - imp = self.create_contact_import("media/test_imports/header_chars.xlsx") - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, - {"header": "Field: A-Number", "mapping": {"type": "field", "key": "a_number", "name": "A-Number"}}, - ], - imp.mappings, - ) - - @mock_mailroom - def test_batches(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - self.assertEqual(3, imp.num_records) - self.assertIsNone(imp.started_on) - - # info can be fetched but it's empty - self.assertEqual( - {"status": "P", "num_created": 0, "num_updated": 0, "num_errored": 0, "errors": [], "time_taken": 0}, - imp.get_info(), - ) - - imp.start() - batches = list(imp.batches.order_by("id")) - - self.assertIsNotNone(imp.started_on) - self.assertEqual(1, len(batches)) - self.assertEqual(0, batches[0].record_start) - self.assertEqual(3, batches[0].record_end) - self.assertEqual( - [ - { - "_import_row": 2, - "name": "Eric Newcomer", - "urns": ["tel:+250788382382"], - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "name": "NIC POTTIER", - "urns": ["tel:+250788383383"], - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 4, - "name": 
"jen newcomer", - "urns": ["tel:+250788383385"], - "groups": [str(imp.group.uuid)], - }, - ], - batches[0].specs, - ) - - # check batch was queued for import by mailroom - self.assertEqual( - [ - { - "type": "import_contact_batch", - "org_id": self.org.id, - "task": {"contact_import_batch_id": batches[0].id}, - "queued_on": matchers.Datetime(), - }, - ], - mr_mocks.queued_batch_tasks, - ) - - # records are batched if they exceed batch size - with patch("temba.contacts.models.ContactImport.BATCH_SIZE", 2): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - imp.start() - - batches = list(imp.batches.order_by("id")) - self.assertEqual(2, len(batches)) - self.assertEqual(0, batches[0].record_start) - self.assertEqual(2, batches[0].record_end) - self.assertEqual(2, batches[1].record_start) - self.assertEqual(3, batches[1].record_end) - - # info is calculated across all batches - self.assertEqual( - { - "status": "O", - "num_created": 0, - "num_updated": 0, - "num_errored": 0, - "errors": [], - "time_taken": matchers.Int(min=0), - }, - imp.get_info(), - ) - - # simulate mailroom starting to process first batch - imp.batches.filter(id=batches[0].id).update( - status="O", num_created=2, num_updated=1, errors=[{"record": 1, "message": "that's wrong"}] - ) - - self.assertEqual( - { - "status": "O", - "num_created": 2, - "num_updated": 1, - "num_errored": 0, - "errors": [{"record": 1, "message": "that's wrong"}], - "time_taken": matchers.Int(min=0), - }, - imp.get_info(), - ) - - # simulate mailroom completing first batch, starting second - imp.batches.filter(id=batches[0].id).update(status="C", finished_on=timezone.now()) - imp.batches.filter(id=batches[1].id).update( - status="O", num_created=3, num_updated=5, errors=[{"record": 3, "message": "that's not right"}] - ) - - self.assertEqual( - { - "status": "O", - "num_created": 5, - "num_updated": 6, - "num_errored": 0, - "errors": [{"record": 1, "message": "that's wrong"}, {"record": 3, "message": "that's not right"}], - "time_taken": matchers.Int(min=0), - }, - imp.get_info(), - ) - - # simulate mailroom completing second batch - imp.batches.filter(id=batches[1].id).update(status="C", finished_on=timezone.now()) - imp.status = "C" - imp.finished_on = timezone.now() - imp.save(update_fields=("finished_on", "status")) - - self.assertEqual( - { - "status": "C", - "num_created": 5, - "num_updated": 6, - "num_errored": 0, - "errors": [{"record": 1, "message": "that's wrong"}, {"record": 3, "message": "that's not right"}], - "time_taken": matchers.Int(min=0), - }, - imp.get_info(), - ) - - @mock_mailroom - def test_batches_with_fields(self, mr_mocks): - self.create_field("goats", "Goats", ContactField.TYPE_NUMBER) - - imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") - imp.start() - batch = imp.batches.get() # single batch - - self.assertEqual( - [ - { - "_import_row": 2, - "name": "John Doe", - "language": "eng", - "status": "archived", - "urns": ["tel:+250788123123"], - "fields": {"goats": "1", "sheep": "0"}, - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "name": "Mary Smith", - "language": "spa", - "status": "blocked", - "urns": ["tel:+250788456456"], - "fields": {"goats": "3", "sheep": "5"}, - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 4, - "urns": ["tel:+250788456678"], - "groups": [str(imp.group.uuid)], - }, # blank values ignored - ], - batch.specs, - ) - - imp = self.create_contact_import("media/test_imports/with_empty_rows.xlsx") - imp.start() - batch = 
imp.batches.get() # single batch - - # rows 3 and 4 of the file are empty and so are skipped - self.assertEqual( - [ - { - "_import_row": 2, - "name": "John Doe", - "language": "eng", - "urns": ["tel:+250788123123"], - "fields": {"goats": "1", "sheep": "0"}, - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 5, - "name": "Mary Smith", - "language": "spa", - "urns": ["tel:+250788456456"], - "fields": {"goats": "3", "sheep": "5"}, - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 6, - "urns": ["tel:+250788456678"], - "groups": [str(imp.group.uuid)], - }, # blank values ignored - ], - batch.specs, - ) - - imp = self.create_contact_import("media/test_imports/with_uuid.xlsx") - imp.start() - batch = imp.batches.get() - self.assertEqual( - [ - { - "_import_row": 2, - "uuid": "f519ca1f-8513-49ba-8896-22bf0420dec7", - "name": "Joe", - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "uuid": "989975f0-3bff-43d6-82c8-a6bbc201c938", - "name": "Frank", - "groups": [str(imp.group.uuid)], - }, - ], - batch.specs, - ) - - # cells with -- mean explicit clearing of those values - imp = self.create_contact_import("media/test_imports/explicit_clearing.xlsx") - imp.start() - batch = imp.batches.get() # single batch - - self.assertEqual( - { - "_import_row": 4, - "name": "", - "language": "", - "urns": ["tel:+250788456678"], - "fields": {"goats": "", "sheep": ""}, - "groups": [str(imp.group.uuid)], - }, - batch.specs[2], - ) - - # uuids and languages converted to lowercase, case in names is preserved - imp = self.create_contact_import("media/test_imports/uppercase.xlsx") - imp.start() - batch = imp.batches.get() - self.assertEqual( - [ - { - "_import_row": 2, - "uuid": "92faa753-6faa-474a-a833-788032d0b757", - "name": "Eric Newcomer", - "language": "eng", - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "uuid": "3c11ac1f-c869-4247-a73c-9b97bff61659", - "name": "NIC POTTIER", - "language": "spa", - "groups": [str(imp.group.uuid)], - }, - ], - batch.specs, - ) - - @mock_mailroom - def test_batches_with_invalid_urn(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/invalid_urn.xlsx") - imp.start() - batch = imp.batches.get() - - # invalid-looking urns are still passed to mailroom to decide how to handle them - self.assertEqual( - [ - {"_import_row": 2, "name": "Eric Newcomer", "urns": ["tel:+%3F"], "groups": [str(imp.group.uuid)]}, - { - "_import_row": 3, - "name": "Nic Pottier", - "urns": ["tel:2345678901234567890"], - "groups": [str(imp.group.uuid)], - }, - ], - batch.specs, - ) - - @mock_mailroom - def test_batches_with_multiple_tels(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/multiple_tel_urns.xlsx") - imp.start() - batch = imp.batches.get() - - self.assertEqual( - [ - { - "_import_row": 2, - "name": "Bob", - "urns": ["tel:+250788382001", "tel:+250788382002", "tel:+250788382003"], - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "name": "Jim", - "urns": ["tel:+250788382004", "tel:+250788382005"], - "groups": [str(imp.group.uuid)], - }, - ], - batch.specs, - ) - - @mock_mailroom - def test_batches_from_xlsx(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - imp.start() - batch = imp.batches.get() - - self.assertEqual( - [ - { - "_import_row": 2, - "name": "Eric Newcomer", - "urns": ["tel:+250788382382"], - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 3, - "name": "NIC POTTIER", - "urns": ["tel:+250788383383"], - "groups": [str(imp.group.uuid)], - }, - { - "_import_row": 4, - 
"name": "jen newcomer", - "urns": ["tel:+250788383385"], - "groups": [str(imp.group.uuid)], - }, - ], - batch.specs, - ) - - @mock_mailroom - def test_detect_spamminess(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/sequential_tels.xlsx") - imp.start() - - self.org.refresh_from_db() - self.assertTrue(self.org.is_flagged) - - with patch("temba.contacts.models.ContactImport.SEQUENTIAL_URNS_THRESHOLD", 3): - self.assertFalse(ContactImport._detect_spamminess(["tel:+593979000001", "tel:+593979000002"])) - self.assertFalse( - ContactImport._detect_spamminess( - ["tel:+593979000001", "tel:+593979000003", "tel:+593979000005", "tel:+593979000007"] - ) - ) - - self.assertTrue( - ContactImport._detect_spamminess(["tel:+593979000001", "tel:+593979000002", "tel:+593979000003"]) - ) - - # order not important - self.assertTrue( - ContactImport._detect_spamminess(["tel:+593979000003", "tel:+593979000001", "tel:+593979000002"]) - ) - - # non-numeric paths ignored - self.assertTrue( - ContactImport._detect_spamminess( - ["tel:+593979000001", "tel:ABC", "tel:+593979000002", "tel:+593979000003"] - ) - ) - - @mock_mailroom - def test_detect_spamminess_verified_org(self, mr_mocks): - # if an org is verified, no flagging occurs - self.org.verify() - - imp = self.create_contact_import("media/test_imports/sequential_tels.xlsx") - imp.start() - - self.org.refresh_from_db() - self.assertFalse(self.org.is_flagged) - - def test_data_types(self): - imp = self.create_contact_import("media/test_imports/data_formats.xlsx") - imp.start() - batch = imp.batches.get() - self.assertEqual( - [ - { - "_import_row": 2, - "uuid": "17c4388a-024f-4e67-937a-13be78a70766", - "fields": { - "a_number": "1234.5678", - "a_date": "2020-10-19T00:00:00+02:00", - "a_time": "13:17:00", - "a_datetime": "2020-10-19T13:18:00+02:00", - "price": "123.45", - }, - "groups": [str(imp.group.uuid)], - } - ], - batch.specs, - ) - - def test_parse_value(self): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - kgl = ZoneInfo("Africa/Kigali") - - tests = [ - ("", ""), - (" Yes ", "Yes"), - (1234, "1234"), - (123.456, "123.456"), - (date(2020, 9, 18), "2020-09-18"), - (datetime(2020, 9, 18, 15, 45, 30, 0), "2020-09-18T15:45:30+02:00"), - (datetime(2020, 9, 18, 15, 45, 30, 0).replace(tzinfo=kgl), "2020-09-18T15:45:30+02:00"), - ] - for test in tests: - self.assertEqual(test[1], imp._parse_value(test[0], tz=kgl)) - - def test_get_default_group_name(self): - self.create_group("Testers", contacts=[]) - tests = [ - ("simple.xlsx", "Simple"), - ("testers.xlsx", "Testers 2"), # group called Testers already exists - ("contact-imports.xlsx", "Contact Imports"), - ("abc_@@é.xlsx", "Abc É"), - ("a_@@é.xlsx", "Import"), # would be too short - (f"{'x' * 100}.xlsx", "Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"), # truncated - ] - for test in tests: - self.assertEqual(test[1], ContactImport(org=self.org, original_filename=test[0]).get_default_group_name()) - - @mock_mailroom - def test_delete(self, mr_mocks): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - imp.start() - imp.delete() - - self.assertEqual(0, ContactImport.objects.count()) - self.assertEqual(0, ContactImportBatch.objects.count()) - - -class ContactImportCRUDLTest(TembaTest, CRUDLTestMixin): - def test_create_and_preview(self): - create_url = reverse("contacts.contactimport_create") - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - self.assertCreateFetch(create_url, [self.editor, self.admin], 
form_fields=["file"]) - - # try posting with nothing - response = self.client.post(create_url, {}) - self.assertFormError(response.context["form"], "file", "This field is required.") - - # try uploading an empty file - response = self.client.post(create_url, {"file": self.upload("media/test_imports/empty.xlsx")}) - self.assertFormError(response.context["form"], "file", "Import file doesn't contain any records.") - - # try uploading a valid XLSX file - response = self.client.post(create_url, {"file": self.upload("media/test_imports/simple.xlsx")}) - self.assertEqual(302, response.status_code) - - imp = ContactImport.objects.get() - self.assertEqual(self.org, imp.org) - self.assertEqual(3, imp.num_records) - self.assertRegex(imp.file.name, rf"orgs/{self.org.id}/contact_imports/[\w-]{{36}}.xlsx$") - self.assertEqual("simple.xlsx", imp.original_filename) - self.assertIsNone(imp.started_on) - self.assertIsNone(imp.group) - - preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) - read_url = reverse("contacts.contactimport_read", args=[imp.id]) - - # will have been redirected to the preview view for the new import - self.assertEqual(preview_url, response.url) - - response = self.client.get(preview_url) - self.assertContains(response, "URN:Tel") - self.assertContains(response, "name") - - response = self.client.post(preview_url, {}) - self.assertEqual(302, response.status_code) - self.assertEqual(read_url, response.url) - - imp.refresh_from_db() - self.assertIsNotNone(imp.started_on) - - # can no longer access preview URL.. will be redirected to read - response = self.client.get(preview_url) - self.assertEqual(302, response.status_code) - self.assertEqual(read_url, response.url) - - @mock_mailroom - def test_creating_new_group(self, mr_mocks): - self.login(self.admin) - imp = self.create_contact_import("media/test_imports/simple.xlsx") - preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) - read_url = reverse("contacts.contactimport_read", args=[imp.id]) - - # create some groups - self.create_group("Testers", contacts=[]) - doctors = self.create_group("Doctors", contacts=[]) - - # try creating new group but not providing a name - response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": " "}) - self.assertFormError(response.context["form"], "new_group_name", "Required.") - - # try creating new group but providing an invalid name - response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": '"Foo"'}) - self.assertFormError(response.context["form"], "new_group_name", "Invalid group name.") - - # try creating new group but providing a name of an existing group - response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "testERs"}) - self.assertFormError(response.context["form"], "new_group_name", "Already exists.") - - # try creating new group when we've already reached our group limit - with override_settings(ORG_LIMIT_DEFAULTS={"groups": 2}): - response = self.client.post( - preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "Import"} - ) - self.assertFormError(response.context["form"], None, "This workspace has reached its limit of 2 groups.") - - # finally create new group... 
- response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "Import"}) - self.assertRedirect(response, read_url) - - new_group = ContactGroup.objects.get(name="Import") - imp.refresh_from_db() - self.assertEqual(new_group, imp.group) - - # existing group should not check for workspace limit - imp = self.create_contact_import("media/test_imports/simple.xlsx") - preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) - read_url = reverse("contacts.contactimport_read", args=[imp.id]) - with override_settings(ORG_LIMIT_DEFAULTS={"groups": 2}): - response = self.client.post( - preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": doctors.id} - ) - self.assertRedirect(response, read_url) - imp.refresh_from_db() - self.assertEqual(doctors, imp.group) - - @mock_mailroom - def test_using_existing_group(self, mr_mocks): - self.login(self.admin) - imp = self.create_contact_import("media/test_imports/simple.xlsx") - preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) - read_url = reverse("contacts.contactimport_read", args=[imp.id]) - - # create some groups - self.create_field("age", "Age", ContactField.TYPE_NUMBER) - testers = self.create_group("Testers", contacts=[]) - doctors = self.create_group("Doctors", contacts=[]) - self.create_group("No Age", query='age = ""') - - # only static groups appear as options - response = self.client.get(preview_url) - self.assertEqual([doctors, testers], list(response.context["form"].fields["existing_group"].queryset)) - - # try submitting without group - response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": ""}) - self.assertFormError(response.context["form"], "existing_group", "Required.") - - # finally try with actual group... 
- response = self.client.post( - preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": doctors.id} - ) - self.assertRedirect(response, read_url) - - imp.refresh_from_db() - self.assertEqual(doctors, imp.group) - - def test_preview_with_mappings(self): - self.create_field("age", "Age", ContactField.TYPE_NUMBER) - - imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") - preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) - - self.assertRequestDisallowed(preview_url, [None, self.user, self.agent, self.admin2]) - - # columns 5 and 6 are non-existent fields so will have controls to create new ones - self.assertUpdateFetch( - preview_url, - [self.editor, self.admin], - form_fields=[ - "add_to_group", - "group_mode", - "new_group_name", - "existing_group", - "column_5_include", - "column_5_name", - "column_5_value_type", - "column_6_include", - "column_6_name", - "column_6_value_type", - ], - ) - - # if including a new field, can't use an existing field name - response = self.client.post( - preview_url, - { - "column_5_include": True, - "column_5_name": "Goats", - "column_5_value_type": "N", - "column_6_include": True, - "column_6_name": "age", - "column_6_value_type": "N", - "add_to_group": False, - }, - ) - self.assertEqual(1, len(response.context["form"].errors)) - self.assertFormError(response.context["form"], None, "Field name for 'Field:Sheep' matches an existing field.") - - # if including new fields, can't repeat names - response = self.client.post( - preview_url, - { - "column_5_include": True, - "column_5_name": "Goats", - "column_5_value_type": "N", - "column_6_include": True, - "column_6_name": "goats", - "column_6_value_type": "N", - "add_to_group": False, - }, - ) - self.assertEqual(1, len(response.context["form"].errors)) - self.assertFormError(response.context["form"], None, "Field name 'goats' is repeated.") - - # if including a new field, name can't be invalid - response = self.client.post( - preview_url, - { - "column_5_include": True, - "column_5_name": "Goats", - "column_5_value_type": "N", - "column_6_include": True, - "column_6_name": "#$%^@", - "column_6_value_type": "N", - "add_to_group": False, - }, - ) - self.assertEqual(1, len(response.context["form"].errors)) - self.assertFormError( - response.context["form"], None, "Field name for 'Field:Sheep' is invalid or a reserved word." 
- ) - - # or empty - response = self.client.post( - preview_url, - { - "column_5_include": True, - "column_5_name": "Goats", - "column_5_value_type": "N", - "column_6_include": True, - "column_6_name": "", - "column_6_value_type": "T", - "add_to_group": False, - }, - ) - self.assertEqual(1, len(response.context["form"].errors)) - self.assertFormError(response.context["form"], None, "Field name for 'Field:Sheep' can't be empty.") - - # unless you're ignoring it - response = self.client.post( - preview_url, - { - "column_5_include": True, - "column_5_name": "Goats", - "column_5_value_type": "N", - "column_6_include": False, - "column_6_name": "", - "column_6_value_type": "T", - "add_to_group": False, - }, - ) - self.assertEqual(302, response.status_code) - - # mappings will have been updated - imp.refresh_from_db() - self.assertEqual( - [ - {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, - {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, - {"header": "language", "mapping": {"type": "attribute", "name": "language"}}, - {"header": "Status", "mapping": {"type": "attribute", "name": "status"}}, - {"header": "Created On", "mapping": {"type": "ignore"}}, - { - "header": "field: goats", - "mapping": {"type": "new_field", "key": "goats", "name": "Goats", "value_type": "N"}, - }, - {"header": "Field:Sheep", "mapping": {"type": "ignore"}}, - {"header": "Group:Testers", "mapping": {"type": "ignore"}}, - ], - imp.mappings, - ) - - @patch("temba.contacts.models.ContactImport.BATCH_SIZE", 2) - def test_read(self): - imp = self.create_contact_import("media/test_imports/simple.xlsx") - imp.start() - - read_url = reverse("contacts.contactimport_read", args=[imp.id]) - - self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) - self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=imp) - - -class ContactExportTest(TembaTest): - def setUp(self): - super().setUp() - - self.joe = self.create_contact(name="Joe Blow", phone="123") - self.frank = self.create_contact(name="Frank Smith", phone="1234") - - self.contactfield_1 = self.create_field("first", "First", priority=10) - self.contactfield_2 = self.create_field("second", "Second") - self.contactfield_3 = self.create_field("third", "Third", priority=20) - - def _export(self, group, search="", with_groups=()): - export = ContactExport.create(self.org, self.admin, group, search, with_groups=with_groups) - with self.mockReadOnly(assert_models={Contact, ContactURN, ContactField}): - export.perform() - - workbook = load_workbook( - filename=default_storage.open(f"orgs/{self.org.id}/contact_exports/{export.uuid}.xlsx") - ) - return workbook.worksheets, export - - @mock_mailroom - def test_export(self, mr_mocks): - # archive all our current contacts - Contact.apply_action_block(self.admin, self.org.contacts.all()) - - # make third a datetime - self.contactfield_3.value_type = ContactField.TYPE_DATETIME - self.contactfield_3.save() - - # start one of our contacts down it - contact = self.create_contact( - "Be\02n Haggerty", - phone="+12067799294", - fields={"first": "On\02e", "third": "20/12/2015 08:30"}, - last_seen_on=datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - ) - - flow = self.get_flow("color_v13") - nodes = flow.get_definition()["nodes"] - color_prompt = nodes[0] - color_split = nodes[4] - - ( - MockSessionWriter(self.joe, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ) - - # create 
another contact, this should sort before Ben - contact2 = self.create_contact("Adam Sumner", urns=["tel:+12067799191", "twitter:adam"], language="eng") - urns = [str(urn) for urn in contact2.get_urns()] - urns.append("mailto:adam@sumner.com") - urns.append("telegram:1234") - contact2.modify(self.admin, contact2.update_urns(urns)) - - group1 = self.create_group("Poppin Tags", [contact, contact2]) - group2 = self.create_group("Dynamic", query="tel is 1234") - group2.status = ContactGroup.STATUS_EVALUATING - group2.save() - - # create orphaned URN in scheme that no contacts have a URN for - ContactURN.objects.create(org=self.org, identity="line:12345", scheme="line", path="12345") - - def assertReimport(export): - with default_storage.open(f"orgs/{self.org.id}/contact_exports/{export.uuid}.xlsx") as exp: - with tempfile.NamedTemporaryFile(delete=False) as tmp: - tmp.write(exp.read()) - tmp.close() - - self.create_contact_import(tmp.name) - - with self.assertNumQueries(22): - sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) - self.assertEqual(2, export.num_records) - self.assertEqual("C", export.status) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:First", - "Field:Second", - "Group:Poppin Tags", - ], - [ - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "", - "+12067799294", - "", - "", - "20-12-2015 08:30", - "One", - "", - True, - ], - [ - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - "", - "adam@sumner.com", - "+12067799191", - "1234", - "adam", - "", - "", - "", - True, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - # check that notifications were created - export = Export.objects.filter(export_type=ContactExport.slug).order_by("id").last() - self.assertEqual(1, self.admin.notifications.filter(notification_type="export:finished", export=export).count()) - - # change the order of the fields - self.contactfield_2.priority = 15 - self.contactfield_2.save() - - with self.assertNumQueries(21): - sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) - self.assertEqual(2, export.num_records) - self.assertEqual("C", export.status) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "", - "+12067799294", - "", - "", - "20-12-2015 08:30", - "", - "One", - True, - ], - [ - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - "", - "adam@sumner.com", - "+12067799191", - "1234", - "adam", - "", - "", - "", - True, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - # more contacts do not increase the queries - contact3 = self.create_contact("Luol Deng", urns=["tel:+12078776655", "twitter:deng"]) - contact4 = self.create_contact("Stephen", urns=["tel:+12078778899", "twitter:stephen"]) - contact.urns.create(org=self.org, identity="tel:+12062233445", scheme="tel", path="+12062233445") - - # but should have additional Twitter and phone 
columns - with self.assertNumQueries(21): - sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) - self.assertEqual(4, export.num_records) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "", - "+12067799294", - "+12062233445", - "", - "", - "20-12-2015 08:30", - "", - "One", - True, - ], - [ - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - "", - "adam@sumner.com", - "+12067799191", - "", - "1234", - "adam", - "", - "", - "", - True, - ], - [ - contact3.uuid, - "Luol Deng", - "", - "Active", - contact3.created_on, - "", - "", - "+12078776655", - "", - "", - "deng", - "", - "", - "", - False, - ], - [ - contact4.uuid, - "Stephen", - "", - "Active", - contact4.created_on, - "", - "", - "+12078778899", - "", - "", - "stephen", - "", - "", - "", - False, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - # export a specified group of contacts (only Ben and Adam are in the group) - with self.assertNumQueries(21): - sheets, export = self._export(group1, with_groups=[group1]) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "", - "+12067799294", - "+12062233445", - "", - "", - "20-12-2015 08:30", - "", - "One", - True, - ], - [ - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - "", - "adam@sumner.com", - "+12067799191", - "", - "1234", - "adam", - "", - "", - "", - True, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - contact5 = self.create_contact("George", urns=["tel:+1234567777"], status=Contact.STATUS_STOPPED) - - # export a specified status group of contacts (Stopped) - sheets, export = self._export(self.org.groups.get(name="Stopped"), with_groups=[group1]) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact5.uuid, - "George", - "", - "Stopped", - contact5.created_on, - "", - "", - "1234567777", - "", - "", - "", - "", - "", - "", - False, - ], - ], - tz=self.org.timezone, - ) - - # export a search - mr_mocks.contact_export([contact2.id, contact3.id]) - with self.assertNumQueries(22): - sheets, export = self._export( - self.org.active_contacts_group, "name has adam or name has deng", with_groups=[group1] - ) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - 
"", - "adam@sumner.com", - "+12067799191", - "", - "1234", - "adam", - "", - "", - "", - True, - ], - [ - contact3.uuid, - "Luol Deng", - "", - "Active", - contact3.created_on, - "", - "", - "+12078776655", - "", - "", - "deng", - "", - "", - "", - False, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - # export a search within a specified group of contacts - mr_mocks.contact_export([contact.id]) - with self.assertNumQueries(20): - sheets, export = self._export(group1, search="Hagg", with_groups=[group1]) - self.assertExcelSheet( - sheets[0], - [ - [ - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "URN:Mailto", - "URN:Tel", - "URN:Tel", - "URN:Telegram", - "URN:Twitter", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "", - "+12067799294", - "+12062233445", - "", - "", - "20-12-2015 08:30", - "", - "One", - True, - ], - ], - tz=self.org.timezone, - ) - - assertReimport(export) - - # now try with an anonymous org - with self.anonymous(self.org): - sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) - self.assertExcelSheet( - sheets[0], - [ - [ - "ID", - "Scheme", - "Contact UUID", - "Name", - "Language", - "Status", - "Created On", - "Last Seen On", - "Field:Third", - "Field:Second", - "Field:First", - "Group:Poppin Tags", - ], - [ - str(contact.id), - "tel", - contact.uuid, - "Ben Haggerty", - "", - "Active", - contact.created_on, - datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), - "20-12-2015 08:30", - "", - "One", - True, - ], - [ - str(contact2.id), - "tel", - contact2.uuid, - "Adam Sumner", - "eng", - "Active", - contact2.created_on, - "", - "", - "", - "", - True, - ], - [ - str(contact3.id), - "tel", - contact3.uuid, - "Luol Deng", - "", - "Active", - contact3.created_on, - "", - "", - "", - "", - False, - ], - [ - str(contact4.id), - "tel", - contact4.uuid, - "Stephen", - "", - "Active", - contact4.created_on, - "", - "", - "", - "", - False, - ], - ], - tz=self.org.timezone, - ) - assertReimport(export) - - -class BackfillProxyFieldsTest(MigrationTest): - app = "contacts" - migrate_from = "0188_contactfield_is_proxy_alter_contactfield_is_system" - migrate_to = "0189_backfill_proxy_fields" - - OLD_SYSTEM_FIELDS = [ - {"key": "id", "name": "ID", "value_type": "N"}, - {"key": "name", "name": "Name", "value_type": "T"}, - {"key": "created_on", "name": "Created On", "value_type": "D"}, - {"key": "language", "name": "Language", "value_type": "T"}, - {"key": "last_seen_on", "name": "Last Seen On", "value_type": "D"}, - ] - - def setUpBeforeMigration(self, apps): - # make org 1 look like an org with the old system fields - self.org.fields.all().delete() - - for spec in self.OLD_SYSTEM_FIELDS: - self.org.fields.create( - is_system=True, - key=spec["key"], - name=spec["name"], - value_type=spec["value_type"], - show_in_table=False, - created_by=self.org.created_by, - modified_by=self.org.modified_by, - ) - - def test_migration(self): - self.assertEqual( - {"created_on", "last_seen_on"}, set(self.org.fields.filter(is_system=True).values_list("key", flat=True)) - ) - self.assertEqual( - {"created_on", "last_seen_on"}, set(self.org.fields.filter(is_proxy=True).values_list("key", flat=True)) - ) - self.assertEqual( - {"created_on", "last_seen_on"}, set(self.org2.fields.filter(is_system=True).values_list("key", flat=True)) - ) - 
self.assertEqual( - {"created_on", "last_seen_on"}, set(self.org2.fields.filter(is_proxy=True).values_list("key", flat=True)) - ) diff --git a/temba/contacts/tests/__init__.py b/temba/contacts/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/temba/contacts/tests/test_contact.py b/temba/contacts/tests/test_contact.py new file mode 100644 index 00000000000..821492497f5 --- /dev/null +++ b/temba/contacts/tests/test_contact.py @@ -0,0 +1,977 @@ +from datetime import timedelta +from decimal import Decimal +from unittest.mock import call, patch +from uuid import UUID + +from django.db.models import Value as DbValue +from django.db.models.functions import Concat, Substr +from django.urls import reverse +from django.utils import timezone + +from temba import mailroom +from temba.campaigns.models import Campaign, CampaignEvent, EventFire +from temba.channels.models import ChannelEvent +from temba.contacts.models import URN, Contact, ContactField, ContactGroup, ContactURN +from temba.flows.models import Flow +from temba.locations.models import AdminBoundary +from temba.mailroom import modifiers +from temba.msgs.models import Msg, SystemLabel +from temba.orgs.models import Org +from temba.schedules.models import Schedule +from temba.tests import TembaTest, mock_mailroom +from temba.tests.engine import MockSessionWriter +from temba.tickets.models import Ticket + + +class ContactTest(TembaTest): + def setUp(self): + super().setUp() + + self.user1 = self.create_user("nash") + + self.joe = self.create_contact(name="Joe Blow", urns=["twitter:blow80", "tel:+250781111111"]) + self.frank = self.create_contact(name="Frank Smith", phone="+250782222222") + self.billy = self.create_contact(name="Billy Nophone") + self.voldemort = self.create_contact(phone="+250768383383") + + # create an orphaned URN + ContactURN.objects.create( + org=self.org, scheme="tel", path="+250788888888", identity="tel:+250788888888", priority=50 + ) + + # create a deleted contact + self.jim = self.create_contact(name="Jim") + self.jim.release(self.user, deindex=False) + + # create contact in other org + self.other_org_contact = self.create_contact(name="Fred", phone="+250768111222", org=self.org2) + + def test_contact_notes(self): + note_text = "This is note" + + # create 10 notes + for i in range(10): + self.joe.set_note(self.user, f"{note_text} {i+1}") + + notes = self.joe.notes.all().order_by("id") + + # we should only have five notes after pruning + self.assertEqual(5, notes.count()) + + # check that the oldest notes are the ones that were pruned + self.assertEqual("This is note 6", notes.first().text) + + @mock_mailroom + def test_block_and_stop(self, mr_mocks): + self.joe.block(self.admin) + self.joe.stop(self.admin) + self.joe.restore(self.admin) + + self.assertEqual( + [ + call(self.org, self.admin, [self.joe], [modifiers.Status(status="blocked")]), + call(self.org, self.admin, [self.joe], [modifiers.Status(status="stopped")]), + call(self.org, self.admin, [self.joe], [modifiers.Status(status="active")]), + ], + mr_mocks.calls["contact_modify"], + ) + + @mock_mailroom + def test_open_ticket(self, mock_contact_modify): + mock_contact_modify.return_value = {self.joe.id: {"contact": {}, "events": []}} + + ticket = self.joe.open_ticket( + self.admin, topic=self.org.default_ticket_topic, assignee=self.agent, note="Looks sus" + ) + + self.assertEqual(self.org.default_ticket_topic, ticket.topic) + self.assertEqual("Looks sus", ticket.events.get(event_type="O").note) + + @mock_mailroom + def 
test_interrupt(self, mr_mocks): + # noop when contact not in a flow + self.assertFalse(self.joe.interrupt(self.admin)) + + flow = self.create_flow("Test") + MockSessionWriter(self.joe, flow).wait().save() + + self.assertTrue(self.joe.interrupt(self.admin)) + + @mock_mailroom + def test_release(self, mr_mocks): + # create a contact with a message + old_contact = self.create_contact("Jose", phone="+12065552000") + self.create_incoming_msg(old_contact, "hola mundo") + urn = old_contact.get_urn() + self.create_channel_event(self.channel, urn.identity, ChannelEvent.TYPE_CALL_IN_MISSED) + + self.create_ticket(old_contact) + + ivr_flow = self.get_flow("ivr") + msg_flow = self.get_flow("favorites_v13") + + self.create_incoming_call(msg_flow, old_contact) + + # steal his urn into a new contact + contact = self.create_contact("Joe", urns=["twitter:tweettweet"], fields={"gender": "Male", "age": 40}) + urn.contact = contact + urn.save(update_fields=("contact",)) + group = self.create_group("Test Group", contacts=[contact]) + + contact2 = self.create_contact("Billy", urns=["tel:1234567"]) + + # create scheduled and regular broadcasts which send to both contacts + schedule = Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY) + bcast1 = self.create_broadcast( + self.admin, {"eng": {"text": "Test"}}, contacts=[contact, contact2], schedule=schedule + ) + bcast2 = self.create_broadcast(self.admin, {"eng": {"text": "Test"}}, contacts=[contact, contact2]) + + flow_nodes = msg_flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[2] + beer_prompt = flow_nodes[3] + beer_split = flow_nodes[5] + name_prompt = flow_nodes[6] + name_split = flow_nodes[7] + + ( + MockSessionWriter(contact, msg_flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "red")) + .visit(beer_prompt) + .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "primus")) + .visit(name_prompt) + .send_msg("Lastly, what is your name?", self.channel) + .visit(name_split) + .wait() + .save() + ) + + campaign = Campaign.create(self.org, self.admin, "Reminders", group) + joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) + event = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") + EventFire.objects.create(event=event, contact=contact, scheduled=timezone.now() + timedelta(days=2)) + + self.create_incoming_call(msg_flow, contact) + + # give contact an open and a closed ticket + self.create_ticket(contact) + self.create_ticket(contact, closed_on=timezone.now()) + + self.assertEqual(1, group.contacts.all().count()) + self.assertEqual(1, contact.calls.all().count()) + self.assertEqual(2, contact.addressed_broadcasts.all().count()) + self.assertEqual(2, contact.urns.all().count()) + self.assertEqual(2, contact.runs.all().count()) + self.assertEqual(7, contact.msgs.all().count()) + self.assertEqual(2, len(contact.fields)) + self.assertEqual(1, contact.campaign_fires.count()) + + self.assertEqual(2, Ticket.get_status_count(self.org, self.org.topics.all(), Ticket.STATUS_OPEN)) + self.assertEqual(1, Ticket.get_status_count(self.org, self.org.topics.all(), Ticket.STATUS_CLOSED)) + + # first try releasing with _full_release patched so we can check the state of the contact before the task + # to do a full release has kicked off + with patch("temba.contacts.models.Contact._full_release"): + contact.release(self.admin) + + self.assertEqual(2, contact.urns.all().count()) + for urn in contact.urns.all(): + UUID(urn.path, version=4) + self.assertEqual(URN.DELETED_SCHEME, urn.scheme) + + # tickets unchanged + self.assertEqual(2, contact.tickets.count()) + + # a new contact arrives with those urns + new_contact = self.create_contact("URN Thief", urns=["tel:+12065552000", "twitter:tweettweet"]) + self.assertEqual(2, new_contact.urns.all().count()) + + self.assertEqual({contact2}, set(bcast1.contacts.all())) + self.assertEqual({contact, contact2}, set(bcast2.contacts.all())) + + # now lets go for a full release + contact.release(self.admin) + + contact.refresh_from_db() + self.assertEqual(0, group.contacts.all().count()) + self.assertEqual(0, contact.calls.all().count()) + self.assertEqual(0, contact.addressed_broadcasts.all().count()) + self.assertEqual(0, contact.urns.all().count()) + self.assertEqual(0, contact.runs.all().count()) + self.assertEqual(0, contact.msgs.all().count()) + self.assertEqual(0, contact.campaign_fires.count()) + + # tickets deleted (only for this contact) + self.assertEqual(0, contact.tickets.count()) + self.assertEqual(1, Ticket.get_status_count(self.org, self.org.topics.all(), Ticket.STATUS_OPEN)) + self.assertEqual(0, Ticket.get_status_count(self.org, self.org.topics.all(), Ticket.STATUS_CLOSED)) + + # contact who used to own our urn had theirs released too + self.assertEqual(0, old_contact.calls.all().count()) + self.assertEqual(0, old_contact.msgs.all().count()) + + self.assertIsNone(contact.fields) + self.assertIsNone(contact.name) + + # nope, we aren't paranoid or anything + Org.objects.get(id=self.org.id) + Flow.objects.get(id=msg_flow.id) + Flow.objects.get(id=ivr_flow.id) + self.assertEqual(1, Ticket.objects.count()) + + @mock_mailroom + def test_status_changes_and_release(self, mr_mocks): + flow = self.create_flow("Test") + 
msg1 = self.create_incoming_msg(self.joe, "Test 1") + msg2 = self.create_incoming_msg(self.joe, "Test 2", flow=flow) + msg3 = self.create_incoming_msg(self.joe, "Test 3", visibility="A") + label = self.create_label("Interesting") + label.toggle_label([msg1, msg2, msg3], add=True) + static_group = self.create_group("Just Joe", [self.joe]) + + msg_counts = SystemLabel.get_counts(self.org) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_INBOX]) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_FLOWS]) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_ARCHIVED]) + + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 4, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + self.assertEqual(set(label.msgs.all()), {msg1, msg2, msg3}) + self.assertEqual(set(static_group.contacts.all()), {self.joe}) + + self.joe.stop(self.user) + + # check that joe is now stopped + self.joe = Contact.objects.get(pk=self.joe.pk) + self.assertEqual(Contact.STATUS_STOPPED, self.joe.status) + self.assertTrue(self.joe.is_active) + + # and added to stopped group + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 1, + Contact.STATUS_ARCHIVED: 0, + }, + ) + self.assertEqual(set(static_group.contacts.all()), set()) + + self.joe.block(self.user) + + # check that joe is now blocked instead of stopped + self.joe.refresh_from_db() + self.assertEqual(Contact.STATUS_BLOCKED, self.joe.status) + self.assertTrue(self.joe.is_active) + + # and that he's been removed from the all and failed groups, and added to the blocked group + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 1, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + # and removed from all groups + self.assertEqual(set(static_group.contacts.all()), set()) + + # but his messages are unchanged + self.assertEqual(2, Msg.objects.filter(contact=self.joe, visibility="V").count()) + msg_counts = SystemLabel.get_counts(self.org) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_INBOX]) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_FLOWS]) + self.assertEqual(1, msg_counts[SystemLabel.TYPE_ARCHIVED]) + + self.joe.archive(self.admin) + + # check that joe is now archived + self.joe.refresh_from_db() + self.assertEqual(Contact.STATUS_ARCHIVED, self.joe.status) + self.assertTrue(self.joe.is_active) + + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 1, + }, + ) + + self.joe.restore(self.admin) + + # check that joe is now neither blocked nor stopped + self.joe = Contact.objects.get(pk=self.joe.pk) + self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status) + self.assertTrue(self.joe.is_active) + + # and that he's been removed from the blocked group, and put back in the all and failed groups + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 4, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + self.joe.release(self.user) + + # check that joe has been released (doesn't change his status) + self.joe = Contact.objects.get(pk=self.joe.pk) + self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status) + self.assertFalse(self.joe.is_active) + + self.assertEqual( + Contact.get_status_counts(self.org), + { + 
Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + # joe's messages should be inactive, blank and have no labels + self.assertEqual(0, Msg.objects.filter(contact=self.joe, visibility="V").count()) + self.assertEqual(0, Msg.objects.filter(contact=self.joe).exclude(text="").count()) + self.assertEqual(0, label.msgs.count()) + + msg_counts = SystemLabel.get_counts(self.org) + self.assertEqual(0, msg_counts[SystemLabel.TYPE_INBOX]) + self.assertEqual(0, msg_counts[SystemLabel.TYPE_FLOWS]) + self.assertEqual(0, msg_counts[SystemLabel.TYPE_ARCHIVED]) + + # and he shouldn't be in any groups + self.assertEqual(set(static_group.contacts.all()), set()) + + # or have any URNs + self.assertEqual(0, ContactURN.objects.filter(contact=self.joe).count()) + + # blocking and failing an inactive contact won't change groups + self.joe.block(self.user) + self.joe.stop(self.user) + + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + # we don't let users undo releasing a contact... but if we have to do it for some reason + self.joe.is_active = True + self.joe.save(update_fields=("is_active",)) + + # check joe goes into the appropriate groups + self.assertEqual( + Contact.get_status_counts(self.org), + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 1, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + def test_contact_display(self): + self.assertEqual("Joe Blow", self.joe.get_display(org=self.org, formatted=False)) + self.assertEqual("Joe Blow", self.joe.get_display()) + self.assertEqual("+250768383383", self.voldemort.get_display(org=self.org, formatted=False)) + self.assertEqual("0768 383 383", self.voldemort.get_display()) + self.assertEqual("Billy Nophone", self.billy.get_display()) + + self.assertEqual("0781 111 111", self.joe.get_urn_display(scheme=URN.TEL_SCHEME)) + self.assertEqual("blow80", self.joe.get_urn_display(org=self.org, formatted=False)) + self.assertEqual("blow80", self.joe.get_urn_display()) + self.assertEqual("+250768383383", self.voldemort.get_urn_display(org=self.org, formatted=False)) + self.assertEqual( + "+250768383383", self.voldemort.get_urn_display(org=self.org, formatted=False, international=True) + ) + self.assertEqual("+250 768 383 383", self.voldemort.get_urn_display(org=self.org, international=True)) + self.assertEqual("0768 383 383", self.voldemort.get_urn_display()) + self.assertEqual("", self.billy.get_urn_display()) + + self.assertEqual("Joe Blow", str(self.joe)) + self.assertEqual("0768 383 383", str(self.voldemort)) + self.assertEqual("Billy Nophone", str(self.billy)) + + with self.anonymous(self.org): + self.assertEqual("Joe Blow", self.joe.get_display(org=self.org, formatted=False)) + self.assertEqual("Joe Blow", self.joe.get_display()) + self.assertEqual("%010d" % self.voldemort.pk, self.voldemort.get_display()) + self.assertEqual("Billy Nophone", self.billy.get_display()) + + self.assertEqual(ContactURN.ANON_MASK, self.joe.get_urn_display(org=self.org, formatted=False)) + self.assertEqual(ContactURN.ANON_MASK, self.joe.get_urn_display()) + self.assertEqual(ContactURN.ANON_MASK, self.voldemort.get_urn_display()) + self.assertEqual("", self.billy.get_urn_display()) + self.assertEqual("", self.billy.get_urn_display(scheme=URN.TEL_SCHEME)) + + self.assertEqual("Joe Blow", str(self.joe)) + self.assertEqual("%010d" % 
self.voldemort.pk, str(self.voldemort)) + self.assertEqual("Billy Nophone", str(self.billy)) + + def test_bulk_urn_cache_initialize(self): + self.joe.refresh_from_db() + self.billy.refresh_from_db() + + contacts = (self.joe, self.frank, self.billy) + Contact.bulk_urn_cache_initialize(contacts) + + with self.assertNumQueries(0): + self.assertEqual(["twitter:blow80", "tel:+250781111111"], [u.urn for u in self.joe.get_urns()]) + self.assertEqual(["twitter:blow80", "tel:+250781111111"], [u.urn for u in getattr(self.joe, "_urns_cache")]) + self.assertEqual(["tel:+250782222222"], [u.urn for u in self.frank.get_urns()]) + self.assertEqual([], [u.urn for u in self.billy.get_urns()]) + + @mock_mailroom + def test_bulk_inspect(self, mr_mocks): + self.assertEqual({}, Contact.bulk_inspect([])) + self.assertEqual( + { + self.joe: { + "urns": [ + { + "channel": {"uuid": str(self.channel.uuid), "name": "Test Channel"}, + "scheme": "tel", + "path": "+250781111111", + "display": "", + }, + {"channel": None, "scheme": "twitter", "path": "blow80", "display": ""}, + ] + }, + self.billy: {"urns": []}, + }, + Contact.bulk_inspect([self.joe, self.billy]), + ) + + @mock_mailroom + def test_omnibox(self, mr_mocks): + omnibox_url = reverse("contacts.contact_omnibox") + + # add a group with members and an empty group + self.create_field("gender", "Gender") + open_tickets = self.org.groups.get(name="Open Tickets") + joe_and_frank = self.create_group("Joe and Frank", [self.joe, self.frank]) + nobody = self.create_group("Nobody", []) + + men = self.create_group("Men", query="gender=M") + ContactGroup.objects.filter(id=men.id).update(status=ContactGroup.STATUS_READY) + + # a group which is being re-evaluated and shouldn't appear in any omnibox results + unready = self.create_group("Group being re-evaluated...", query="gender=M") + unready.status = ContactGroup.STATUS_EVALUATING + unready.save(update_fields=("status",)) + + # Postgres will defer to strcoll for ordering which even for en_US.UTF-8 will return different results on OSX + # and Ubuntu. 
To keep ordering consistent for this test, we don't let URNs start with + + # (see http://postgresql.nabble.com/a-strange-order-by-behavior-td4513038.html) + ContactURN.objects.filter(path__startswith="+").update( + path=Substr("path", 2), identity=Concat(DbValue("tel:"), Substr("path", 2)) + ) + + self.login(self.admin) + + def omnibox_request(query: str): + response = self.client.get(omnibox_url + query) + return response.json()["results"] + + # mock mailroom to return an error + mr_mocks.exception(mailroom.QueryValidationException("ooh that doesn't look right", "syntax")) + + # error is swallowed and we show no results + self.assertEqual([], omnibox_request("?search=-123`213")) + + # lookup specific contacts + self.assertEqual( + [ + {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact", "urn": ""}, + {"id": str(self.joe.uuid), "name": "Joe Blow", "type": "contact", "urn": "blow80"}, + ], + omnibox_request(f"?c={self.joe.uuid},{self.billy.uuid}"), + ) + + # lookup specific groups + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, + ], + omnibox_request(f"?g={joe_and_frank.uuid},{men.uuid}"), + ) + + # empty query just returns up to 25 groups A-Z + with self.assertNumQueries(10): + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, + {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, + {"id": str(open_tickets.uuid), "name": "Open Tickets", "type": "group", "count": 0}, + ], + omnibox_request(""), + ) + + with self.assertNumQueries(13): + mr_mocks.contact_search(query='name ~ "250" OR urn ~ "250"', total=2, contacts=[self.billy, self.frank]) + + self.assertEqual( + [ + {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact", "urn": ""}, + {"id": str(self.frank.uuid), "name": "Frank Smith", "type": "contact", "urn": "250782222222"}, + ], + omnibox_request("?search=250"), + ) + + with self.assertNumQueries(14): + mr_mocks.contact_search(query='name ~ "FRA" OR urn ~ "FRA"', total=1, contacts=[self.frank]) + + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(self.frank.uuid), "name": "Frank Smith", "type": "contact", "urn": "250782222222"}, + ], + omnibox_request("?search=FRA"), + ) + + # specify type filter g (all groups) + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, + {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, + {"id": str(open_tickets.uuid), "name": "Open Tickets", "type": "group", "count": 0}, + ], + omnibox_request("?types=g"), + ) + + # specify type filter s (non-query groups) + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, + ], + omnibox_request("?types=s"), + ) + + with self.anonymous(self.org): + self.assertEqual( + [ + {"id": str(joe_and_frank.uuid), "name": "Joe and Frank", "type": "group", "count": 2}, + {"id": str(men.uuid), "name": "Men", "type": "group", "count": 0}, + {"id": str(nobody.uuid), "name": "Nobody", "type": "group", "count": 0}, + {"id": str(open_tickets.uuid), "name": "Open 
Tickets", "type": "group", "count": 0}, + ], + omnibox_request(""), + ) + + mr_mocks.contact_search(query='name ~ "Billy"', total=1, contacts=[self.billy]) + + self.assertEqual( + [ + {"id": str(self.billy.uuid), "name": "Billy Nophone", "type": "contact"}, + ], + omnibox_request("?search=Billy"), + ) + + # exclude blocked and stopped contacts + self.joe.block(self.admin) + self.frank.stop(self.admin) + + # lookup by contact uuids + self.assertEqual(omnibox_request("?c=%s,%s" % (self.joe.uuid, self.frank.uuid)), []) + + def test_get_scheduled_messages(self): + just_joe = self.create_group("Just Joe", [self.joe]) + + self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) + + broadcast = self.create_broadcast(self.admin, {"eng": {"text": "Hello"}}, contacts=[self.frank]) + self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) + + broadcast.contacts.add(self.joe) + + self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) + + schedule_time = timezone.now() + timedelta(days=2) + broadcast.schedule = Schedule.create(self.org, schedule_time, Schedule.REPEAT_NEVER) + broadcast.save(update_fields=("schedule",)) + + self.assertEqual(self.joe.get_scheduled_broadcasts().count(), 1) + self.assertIn(broadcast, self.joe.get_scheduled_broadcasts()) + + broadcast.contacts.remove(self.joe) + self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) + + broadcast.groups.add(just_joe) + self.assertEqual(self.joe.get_scheduled_broadcasts().count(), 1) + self.assertIn(broadcast, self.joe.get_scheduled_broadcasts()) + + broadcast.groups.remove(just_joe) + self.assertEqual(0, self.joe.get_scheduled_broadcasts().count()) + + @mock_mailroom + def test_contacts_search(self, mr_mocks): + search_url = reverse("contacts.contact_search") + self.login(self.admin) + + mr_mocks.contact_search("Frank", cleaned='name ~ "Frank"', contacts=[self.frank]) + + response = self.client.get(search_url + "?search=Frank") + self.assertEqual(200, response.status_code) + results = response.json() + + # check that we get a total and a sample + self.assertEqual(1, results["total"]) + self.assertEqual(1, len(results["sample"])) + self.assertEqual("+250 782 222 222", results["sample"][0]["primary_urn_formatted"]) + + # our query should get expanded into a proper query + self.assertEqual('name ~ "Frank"', results["query"]) + + # check no primary urn + self.frank.urns.all().delete() + response = self.client.get(search_url + "?search=Frank") + self.assertEqual(200, response.status_code) + results = response.json() + self.assertEqual("--", results["sample"][0]["primary_urn_formatted"]) + + # no query, no results + response = self.client.get(search_url) + results = response.json() + self.assertEqual(0, results["total"]) + + mr_mocks.exception(mailroom.QueryValidationException("mismatched input at ", "syntax")) + + # bogus query + response = self.client.get(search_url + '?search=name="notclosed') + results = response.json() + self.assertEqual("Invalid query syntax.", results["error"]) + self.assertEqual(0, results["total"]) + + # if we query a field, it should show up in our field dict + age = self.create_field("age", "Age", ContactField.TYPE_NUMBER) + + mr_mocks.contact_search("age>32", cleaned='age > 32"', contacts=[self.frank], fields=[age]) + + response = self.client.get(search_url + "?search=age>32") + results = response.json() + self.assertEqual("Age", results["fields"][str(age.uuid)]["label"]) + + @mock_mailroom + def test_update_status(self, mr_mocks): + self.login(self.admin) + + 
self.assertEqual(Contact.STATUS_ACTIVE, self.joe.status) + + for status, _ in Contact.STATUS_CHOICES: + self.client.post(reverse("contacts.contact_update", args=[self.joe.id]), {"status": status}) + + self.joe.refresh_from_db() + self.assertEqual(status, self.joe.status) + + def test_update(self): + # if new values don't differ from current values.. no modifications + self.assertEqual([], self.joe.update(name="Joe Blow", language="")) + + # change language + self.assertEqual([modifiers.Language(language="eng")], self.joe.update(name="Joe Blow", language="eng")) + + self.joe.language = "eng" + self.joe.save(update_fields=("language",)) + + # change name + self.assertEqual([modifiers.Name(name="Joseph Blow")], self.joe.update(name="Joseph Blow", language="eng")) + + # change both name and language + self.assertEqual( + [modifiers.Name(name="Joseph Blower"), modifiers.Language(language="spa")], + self.joe.update(name="Joseph Blower", language="spa"), + ) + + @mock_mailroom + def test_update_static_groups(self, mr_mocks): + # create some static groups + spammers = self.create_group("Spammers", []) + testers = self.create_group("Testers", []) + customers = self.create_group("Customers", []) + + self.assertEqual(set(spammers.contacts.all()), set()) + self.assertEqual(set(testers.contacts.all()), set()) + self.assertEqual(set(customers.contacts.all()), set()) + + # add to 2 static groups + mods = self.joe.update_static_groups([spammers, testers]) + self.assertEqual( + [ + modifiers.Groups( + modification="add", + groups=[ + modifiers.GroupRef(uuid=spammers.uuid, name="Spammers"), + modifiers.GroupRef(uuid=testers.uuid, name="Testers"), + ], + ), + ], + mods, + ) + + self.joe.modify(self.admin, mods) + + # remove from one and add to another + mods = self.joe.update_static_groups([testers, customers]) + + self.assertEqual( + [ + modifiers.Groups( + modification="remove", groups=[modifiers.GroupRef(uuid=spammers.uuid, name="Spammers")] + ), + modifiers.Groups( + modification="add", groups=[modifiers.GroupRef(uuid=customers.uuid, name="Customers")] + ), + ], + mods, + ) + + @mock_mailroom + def test_bulk_modify_with_no_contacts(self, mr_mocks): + Contact.bulk_modify(self.admin, [], [modifiers.Language(language="spa")]) + + # just a NOOP + self.assertEqual([], mr_mocks.calls["contact_modify"]) + + @mock_mailroom + def test_contact_model(self, mr_mocks): + contact = self.create_contact(name="Boy", phone="12345") + self.assertEqual(contact.get_display(), "Boy") + + contact3 = self.create_contact(name=None, phone="0788111222") + self.channel.country = "RW" + self.channel.save() + + normalized = contact3.get_urn(URN.TEL_SCHEME).ensure_number_normalization(self.channel) + self.assertEqual(normalized.path, "+250788111222") + + contact4 = self.create_contact(name=None, phone="0788333444") + normalized = contact4.get_urn(URN.TEL_SCHEME).ensure_number_normalization(self.channel) + self.assertEqual(normalized.path, "+250788333444") + + contact5 = self.create_contact(name="Jimmy", phone="+250788333555") + mods = contact5.update_urns(["twitter:jimmy_woot", "tel:0788333666"]) + contact5.modify(self.user, mods) + + # check old phone URN still existing but was detached + self.assertIsNone(ContactURN.objects.get(identity="tel:+250788333555").contact) + + # check new URNs were created and attached + self.assertEqual(contact5, ContactURN.objects.get(identity="tel:+250788333666").contact) + self.assertEqual(contact5, ContactURN.objects.get(identity="twitter:jimmy_woot").contact) + + # check twitter URN takes priority if 
you don't specify scheme + self.assertEqual("twitter:jimmy_woot", str(contact5.get_urn())) + self.assertEqual("twitter:jimmy_woot", str(contact5.get_urn(schemes=[URN.TWITTER_SCHEME]))) + self.assertEqual("tel:+250788333666", str(contact5.get_urn(schemes=[URN.TEL_SCHEME]))) + self.assertIsNone(contact5.get_urn(schemes=["email"])) + self.assertIsNone(contact5.get_urn(schemes=["facebook"])) + + def test_field_json(self): + self.setUpLocations() + + # simple text field + self.set_contact_field(self.joe, "dog", "Chef") + self.joe.refresh_from_db() + dog_uuid = str(ContactField.user_fields.get(key="dog").uuid) + + self.assertEqual(self.joe.fields, {dog_uuid: {"text": "Chef"}}) + + self.set_contact_field(self.joe, "dog", "") + self.joe.refresh_from_db() + self.assertEqual(self.joe.fields, {}) + + # numeric field value + self.set_contact_field(self.joe, "dog", "23.00") + self.joe.refresh_from_db() + self.assertEqual(self.joe.fields, {dog_uuid: {"text": "23.00", "number": 23}}) + + # numeric field value + self.set_contact_field(self.joe, "dog", "37.27903") + self.joe.refresh_from_db() + self.assertEqual(self.joe.fields, {dog_uuid: {"text": "37.27903", "number": Decimal("37.27903")}}) + + # numeric field values that could be NaN, we don't support that + self.set_contact_field(self.joe, "dog", "NaN") + self.joe.refresh_from_db() + self.assertEqual(self.joe.fields, {dog_uuid: {"text": "NaN"}}) + + # datetime instead + self.set_contact_field(self.joe, "dog", "2018-03-05T02:31:00.000Z") + self.joe.refresh_from_db() + self.assertEqual( + self.joe.fields, {dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}} + ) + + # setting another field doesn't ruin anything + self.set_contact_field(self.joe, "cat", "Rando") + self.joe.refresh_from_db() + cat_uuid = str(ContactField.user_fields.get(key="cat").uuid) + self.assertEqual( + self.joe.fields, + { + dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}, + cat_uuid: {"text": "Rando"}, + }, + ) + + # setting a fully qualified path parses to that level, regardless of field type + self.set_contact_field(self.joe, "cat", "Rwanda > Kigali City") + self.joe.refresh_from_db() + self.assertEqual( + self.joe.fields, + { + dog_uuid: {"text": "2018-03-05T02:31:00.000Z", "datetime": "2018-03-05T04:31:00+02:00"}, + cat_uuid: {"text": "Rwanda > Kigali City", "state": "Rwanda > Kigali City"}, + }, + ) + + # clear our previous fields + self.set_contact_field(self.joe, "dog", "") + self.assertEqual(self.joe.fields, {cat_uuid: {"text": "Rwanda > Kigali City", "state": "Rwanda > Kigali City"}}) + self.joe.refresh_from_db() + + self.set_contact_field(self.joe, "cat", "") + self.joe.refresh_from_db() + + # change a field to an invalid field value type + self.set_contact_field(self.joe, "cat", "xx") + ContactField.user_fields.filter(key="cat").update(value_type="Z") + bad_field = ContactField.user_fields.get(key="cat") + + with self.assertRaises(KeyError): + self.joe.get_field_serialized(bad_field) + + with self.assertRaises(KeyError): + self.joe.get_field_value(bad_field) + + def test_field_values(self): + self.setUpLocations() + + registration_field = self.create_field( + "registration_date", "Registration Date", value_type=ContactField.TYPE_DATETIME + ) + weight_field = self.create_field("weight", "Weight", value_type=ContactField.TYPE_NUMBER) + color_field = self.create_field("color", "Color", value_type=ContactField.TYPE_TEXT) + state_field = self.create_field("state", "State", 
value_type=ContactField.TYPE_STATE) + + # none value instances + self.assertEqual(self.joe.get_field_serialized(weight_field), None) + self.assertEqual(self.joe.get_field_display(weight_field), "") + self.assertEqual(self.joe.get_field_serialized(registration_field), None) + self.assertEqual(self.joe.get_field_display(registration_field), "") + + self.set_contact_field(self.joe, "registration_date", "2014-12-31T01:04:00Z") + self.set_contact_field(self.joe, "weight", "75.888888") + self.set_contact_field(self.joe, "color", "green") + self.set_contact_field(self.joe, "state", "kigali city") + + self.assertEqual(self.joe.get_field_serialized(registration_field), "2014-12-31T03:04:00+02:00") + + self.assertEqual(self.joe.get_field_serialized(weight_field), "75.888888") + self.assertEqual(self.joe.get_field_display(weight_field), "75.888888") + + self.set_contact_field(self.joe, "weight", "0") + self.assertEqual(self.joe.get_field_serialized(weight_field), "0") + self.assertEqual(self.joe.get_field_display(weight_field), "0") + + # passing something non-numeric to a decimal field + self.set_contact_field(self.joe, "weight", "xxx") + self.assertEqual(self.joe.get_field_serialized(weight_field), None) + self.assertEqual(self.joe.get_field_display(weight_field), "") + + self.assertEqual(self.joe.get_field_serialized(state_field), "Rwanda > Kigali City") + self.assertEqual(self.joe.get_field_display(state_field), "Kigali City") + + self.assertEqual(self.joe.get_field_serialized(color_field), "green") + self.assertEqual(self.joe.get_field_display(color_field), "green") + + # can fetch proxy fields too + created_on = self.org.fields.get(key="created_on") + last_seen_on = self.org.fields.get(key="last_seen_on") + + self.assertEqual(self.joe.get_field_display(created_on), self.org.format_datetime(self.joe.created_on)) + self.assertEqual(self.joe.get_field_display(last_seen_on), "") + + def test_set_location_fields(self): + self.setUpLocations() + + district_field = self.create_field("district", "District", value_type=ContactField.TYPE_DISTRICT) + not_state_field = self.create_field("not_state", "Not State", value_type=ContactField.TYPE_TEXT) + + # add duplicate district in different states + east_province = AdminBoundary.create(osm_id="R005", name="East Province", level=1, parent=self.country) + AdminBoundary.create(osm_id="R004", name="Remera", level=2, parent=east_province) + kigali = AdminBoundary.objects.get(name="Kigali City") + AdminBoundary.create(osm_id="R003", name="Remera", level=2, parent=kigali) + + joe = Contact.objects.get(pk=self.joe.pk) + self.set_contact_field(joe, "district", "Remera") + + # empty because it is ambiguous + self.assertFalse(joe.get_field_value(district_field)) + + state_field = self.create_field("state", "State", value_type=ContactField.TYPE_STATE) + + self.set_contact_field(joe, "state", "Kigali city") + self.assertEqual("Kigali City", joe.get_field_display(state_field)) + self.assertEqual("Rwanda > Kigali City", joe.get_field_serialized(state_field)) + + # test that we don't normalize non-location fields + self.set_contact_field(joe, "not_state", "kigali city") + self.assertEqual("kigali city", joe.get_field_display(not_state_field)) + self.assertEqual("kigali city", joe.get_field_serialized(not_state_field)) + + self.set_contact_field(joe, "district", "Remera") + self.assertEqual("Remera", joe.get_field_display(district_field)) + self.assertEqual("Rwanda > Kigali City > Remera", joe.get_field_serialized(district_field)) + + def test_set_location_ward_fields(self): 
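+ # note that the district and ward created below share the name "Bichi" but should still serialize to the full path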
+ self.setUpLocations()
+
+ state = AdminBoundary.create(osm_id="3710302", name="Kano", level=1, parent=self.country)
+ district = AdminBoundary.create(osm_id="3710307", name="Bichi", level=2, parent=state)
+ AdminBoundary.create(osm_id="3710377", name="Bichi", level=3, parent=district)
+
+ self.create_field("state", "State", value_type=ContactField.TYPE_STATE)
+ self.create_field("district", "District", value_type=ContactField.TYPE_DISTRICT)
+ ward = self.create_field("ward", "Ward", value_type=ContactField.TYPE_WARD)
+
+ jemila = self.create_contact(
+ name="Jemila Alley",
+ urns=["tel:123", "twitter:fulani_p"],
+ fields={"state": "kano", "district": "bichi", "ward": "bichi"},
+ )
+ self.assertEqual(jemila.get_field_serialized(ward), "Rwanda > Kano > Bichi > Bichi")
diff --git a/temba/contacts/tests/test_contactcrudl.py b/temba/contacts/tests/test_contactcrudl.py
new file mode 100644
index 00000000000..889b4afcc27
--- /dev/null
+++ b/temba/contacts/tests/test_contactcrudl.py
@@ -0,0 +1,1380 @@
+import io
+from datetime import timedelta, timezone as tzone
+from decimal import Decimal
+from unittest.mock import call, patch
+
+import iso8601
+
+from django.urls import reverse
+from django.utils import timezone
+
+from temba import mailroom
+from temba.airtime.models import AirtimeTransfer
+from temba.campaigns.models import Campaign, CampaignEvent, EventFire
+from temba.channels.models import ChannelEvent
+from temba.contacts.models import URN, Contact, ContactExport, ContactField
+from temba.flows.models import FlowSession, FlowStart
+from temba.ivr.models import Call
+from temba.locations.models import AdminBoundary
+from temba.msgs.models import Msg
+from temba.orgs.models import Export, OrgRole
+from temba.schedules.models import Schedule
+from temba.tests import CRUDLTestMixin, MockResponse, TembaTest, mock_mailroom
+from temba.tests.engine import MockSessionWriter
+from temba.tickets.models import Topic
+from temba.triggers.models import Trigger
+from temba.utils import json, s3
+from temba.utils.dates import datetime_to_timestamp
+from temba.utils.views.mixins import TEMBA_MENU_SELECTION
+
+
+class ContactCRUDLTest(CRUDLTestMixin, TembaTest):
+ def setUp(self):
+ super().setUp()
+
+ self.country = AdminBoundary.create(osm_id="171496", name="Rwanda", level=0)
+ AdminBoundary.create(osm_id="1708283", name="Kigali", level=1, parent=self.country)
+
+ self.create_field("age", "Age", value_type="N", show_in_table=True)
+ self.create_field("home", "Home", value_type="S", show_in_table=True, priority=10)
+
+ # sample flows don't actually get created by org initialization during tests because there are no users at that
+ # point, so create them explicitly here, so that we also get the sample groups
+ self.org.create_sample_flows("https://api.rapidpro.io")
+
+ def create_campaign(self, contact):
+ self.farmers = self.create_group("Farmers", [contact])
+ self.reminder_flow = self.create_flow("Reminder Flow")
+ self.planting_date = self.create_field("planting_date", "Planting Date", value_type=ContactField.TYPE_DATETIME)
+ self.campaign = Campaign.create(self.org, self.admin, "Planting Reminders", self.farmers)
+
+ # create a flow event
+ self.planting_reminder = CampaignEvent.create_flow_event(
+ self.org,
+ self.admin,
+ self.campaign,
+ relative_to=self.planting_date,
+ offset=0,
+ unit="D",
+ flow=self.reminder_flow,
+ delivery_hour=17,
+ )
+
+ # and a message event
+ self.message_event = CampaignEvent.create_message_event(
+ self.org,
+ self.admin,
+ self.campaign,
+
relative_to=self.planting_date, + offset=7, + unit="D", + message="Sent 7 days after planting date", + ) + + def test_menu(self): + menu_url = reverse("contacts.contact_menu") + + self.assertRequestDisallowed(menu_url, [None, self.agent]) + self.assertPageMenu( + menu_url, + self.admin, + [ + "Active (0)", + "Archived (0)", + "Blocked (0)", + "Stopped (0)", + "Import", + "Fields (2)", + ("Groups", ["Open Tickets (0)", "Survey Audience (0)", "Unsatisfied Customers (0)"]), + ], + ) + + @mock_mailroom + def test_create(self, mr_mocks): + create_url = reverse("contacts.contact_create") + + self.assertRequestDisallowed(create_url, [None, self.agent, self.user]) + self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=("name", "phone")) + + # simulate validation failing because phone number taken + mr_mocks.contact_urns({"tel:+250781111111": 12345678}) + + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Joe", "phone": "+250781111111"}, + form_errors={"phone": "In use by another contact."}, + ) + + # simulate validation failing because phone number isn't E164 + mr_mocks.contact_urns({"tel:+250781111111": False}) + + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Joe", "phone": "+250781111111"}, + form_errors={"phone": "Ensure number includes country code."}, + ) + + # simulate validation failing because phone number isn't valid + mr_mocks.contact_urns({"tel:xx": "URN 0 invalid"}) + + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Joe", "phone": "xx"}, + form_errors={"phone": "Invalid phone number."}, + ) + + # try valid number + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Joe", "phone": "+250782222222"}, + new_obj_query=Contact.objects.filter(org=self.org, name="Joe", urns__identity="tel:+250782222222"), + success_status=200, + ) + + @mock_mailroom + def test_list(self, mr_mocks): + self.login(self.user) + list_url = reverse("contacts.contact_list") + + joe = self.create_contact("Joe", phone="123", fields={"age": "20", "home": "Kigali"}) + frank = self.create_contact("Frank", phone="124", fields={"age": "18"}) + + mr_mocks.contact_search('name != ""', contacts=[]) + self.create_group("No Name", query='name = ""') + + with self.assertNumQueries(16): + response = self.client.get(list_url) + + self.assertEqual([frank, joe], list(response.context["object_list"])) + self.assertIsNone(response.context["search_error"]) + self.assertEqual([], list(response.context["actions"])) + self.assertContentMenu(list_url, self.user, ["Export"]) + + active_contacts = self.org.active_contacts_group + + # fetch with spa flag + response = self.client.get(list_url, content_type="application/json", HTTP_X_TEMBA_SPA="1") + self.assertEqual(response.context["base_template"], "spa.html") + + mr_mocks.contact_search("age = 18", contacts=[frank]) + + response = self.client.get(list_url + "?search=age+%3D+18") + self.assertEqual(list(response.context["object_list"]), [frank]) + self.assertEqual(response.context["search"], "age = 18") + self.assertEqual(response.context["save_dynamic_search"], True) + self.assertIsNone(response.context["search_error"]) + self.assertEqual( + [f.name for f in response.context["contact_fields"]], ["Home", "Age", "Last Seen On", "Created On"] + ) + + mr_mocks.contact_search("age = 18", contacts=[frank], total=10020) + + # we return up to 10000 contacts when searching with ES, so last page is 200 + url = f'{reverse("contacts.contact_list")}?{"search=age+%3D+18&page=200"}' + response = self.client.get(url) + 
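+ # page 200 is still within the 10,000 contact limit so it renders fine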
+ self.assertEqual(response.status_code, 200) + + # when user requests page 201, we return a 404, page not found + url = f'{reverse("contacts.contact_list")}?{"search=age+%3D+18&page=201"}' + response = self.client.get(url) + + self.assertEqual(response.status_code, 404) + + mr_mocks.contact_search('age > 18 and home = "Kigali"', cleaned='age > 18 AND home = "Kigali"', contacts=[joe]) + + response = self.client.get(list_url + '?search=age+>+18+and+home+%3D+"Kigali"') + self.assertEqual(list(response.context["object_list"]), [joe]) + self.assertEqual(response.context["search"], 'age > 18 AND home = "Kigali"') + self.assertEqual(response.context["save_dynamic_search"], True) + self.assertIsNone(response.context["search_error"]) + + mr_mocks.contact_search("Joe", cleaned='name ~ "Joe"', contacts=[joe]) + + response = self.client.get(list_url + "?search=Joe") + self.assertEqual(list(response.context["object_list"]), [joe]) + self.assertEqual(response.context["search"], 'name ~ "Joe"') + self.assertEqual(response.context["save_dynamic_search"], True) + self.assertIsNone(response.context["search_error"]) + + with self.anonymous(self.org): + mr_mocks.contact_search(f"{joe.id}", cleaned=f"id = {joe.id}", contacts=[joe]) + + response = self.client.get(list_url + f"?search={joe.id}") + self.assertEqual(list(response.context["object_list"]), [joe]) + self.assertIsNone(response.context["search_error"]) + self.assertEqual(response.context["search"], f"id = {joe.id}") + self.assertEqual(response.context["save_dynamic_search"], False) + + # try with invalid search string + mr_mocks.exception(mailroom.QueryValidationException("mismatched input at (((", "syntax")) + + response = self.client.get(list_url + "?search=(((") + self.assertEqual(list(response.context["object_list"]), []) + self.assertEqual(response.context["search_error"], "Invalid query syntax.") + self.assertContains(response, "Invalid query syntax.") + + self.login(self.admin) + + # admins can see bulk actions + age_query = "?search=age%20%3E%2050" + response = self.client.get(list_url) + self.assertEqual([frank, joe], list(response.context["object_list"])) + self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) + + self.assertContentMenu( + list_url, + self.admin, + ["New Contact", "New Group", "Export"], + ) + self.assertContentMenu( + list_url + age_query, + self.admin, + ["Create Smart Group", "New Contact", "New Group", "Export"], + ) + + # TODO: group labeling as a feature is on probation + # self.client.post(list_url, {"action": "label", "objects": frank.id, "label": survey_audience.id}) + # self.assertIn(frank, survey_audience.contacts.all()) + + # try label bulk action against search results + # self.client.post(list_url + "?search=Joe", {"action": "label", "objects": joe.id, "label": survey_audience.id}) + # self.assertIn(joe, survey_audience.contacts.all()) + + # self.assertEqual( + # call(self.org.id, group_uuid=str(active_contacts.uuid), query="Joe", sort="", offset=0, exclude_ids=[]), + # mr_mocks.calls["contact_search"][-1], + # ) + + # try archive bulk action + self.client.post(list_url + "?search=Joe", {"action": "archive", "objects": joe.id}) + + # we re-run the search for the response, but exclude Joe + self.assertEqual( + call(self.org, active_contacts, "Joe", sort="", offset=0, exclude_ids=[joe.id]), + mr_mocks.calls["contact_search"][-1], + ) + + response = self.client.get(list_url) + self.assertEqual([frank], list(response.context["object_list"])) + + joe.refresh_from_db() + 
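# the archive bulk action should now be reflected on joe himself
+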
self.assertEqual(Contact.STATUS_ARCHIVED, joe.status) + + @mock_mailroom + def test_blocked(self, mr_mocks): + joe = self.create_contact("Joe", urns=["twitter:joe"]) + frank = self.create_contact("Frank", urns=["twitter:frank"]) + billy = self.create_contact("Billy", urns=["twitter:billy"]) + self.create_contact("Mary", urns=["twitter:mary"]) + + joe.block(self.admin) + frank.block(self.admin) + billy.block(self.admin) + + self.login(self.user) + + blocked_url = reverse("contacts.contact_blocked") + + self.assertRequestDisallowed(blocked_url, [None, self.agent]) + response = self.assertListFetch(blocked_url, [self.editor, self.admin], context_objects=[billy, frank, joe]) + self.assertEqual(["restore", "archive"], list(response.context["actions"])) + self.assertContentMenu(blocked_url, self.admin, ["Export"]) + + # try restore bulk action + self.client.post(blocked_url, {"action": "restore", "objects": billy.id}) + + response = self.client.get(blocked_url) + self.assertEqual([frank, joe], list(response.context["object_list"])) + + billy.refresh_from_db() + self.assertEqual(Contact.STATUS_ACTIVE, billy.status) + + # try archive bulk action + self.client.post(blocked_url, {"action": "archive", "objects": frank.id}) + + response = self.client.get(blocked_url) + self.assertEqual([joe], list(response.context["object_list"])) + + frank.refresh_from_db() + self.assertEqual(Contact.STATUS_ARCHIVED, frank.status) + + @mock_mailroom + def test_stopped(self, mr_mocks): + joe = self.create_contact("Joe", urns=["twitter:joe"]) + frank = self.create_contact("Frank", urns=["twitter:frank"]) + billy = self.create_contact("Billy", urns=["twitter:billy"]) + self.create_contact("Mary", urns=["twitter:mary"]) + + joe.stop(self.admin) + frank.stop(self.admin) + billy.stop(self.admin) + + self.login(self.user) + + stopped_url = reverse("contacts.contact_stopped") + + self.assertRequestDisallowed(stopped_url, [None, self.agent]) + response = self.assertListFetch( + stopped_url, [self.user, self.editor, self.admin], context_objects=[billy, frank, joe] + ) + self.assertEqual(["restore", "archive"], list(response.context["actions"])) + self.assertContentMenu(stopped_url, self.admin, ["Export"]) + + # try restore bulk action + self.client.post(stopped_url, {"action": "restore", "objects": billy.id}) + + response = self.client.get(stopped_url) + self.assertEqual([frank, joe], list(response.context["object_list"])) + + billy.refresh_from_db() + self.assertEqual(Contact.STATUS_ACTIVE, billy.status) + + # try archive bulk action + self.client.post(stopped_url, {"action": "archive", "objects": frank.id}) + + response = self.client.get(stopped_url) + self.assertEqual([joe], list(response.context["object_list"])) + + frank.refresh_from_db() + self.assertEqual(Contact.STATUS_ARCHIVED, frank.status) + + @patch("temba.contacts.models.Contact.BULK_RELEASE_IMMEDIATELY_LIMIT", 5) + @mock_mailroom + def test_archived(self, mr_mocks): + joe = self.create_contact("Joe", urns=["twitter:joe"]) + frank = self.create_contact("Frank", urns=["twitter:frank"]) + billy = self.create_contact("Billy", urns=["twitter:billy"]) + self.create_contact("Mary", urns=["twitter:mary"]) + + joe.archive(self.admin) + frank.archive(self.admin) + billy.archive(self.admin) + + self.login(self.user) + + archived_url = reverse("contacts.contact_archived") + + self.assertRequestDisallowed(archived_url, [None, self.agent]) + response = self.assertListFetch( + archived_url, [self.user, self.editor, self.admin], context_objects=[billy, frank, joe] + ) + 
self.assertEqual(["restore", "delete"], list(response.context["actions"])) + self.assertContentMenu(archived_url, self.admin, ["Export", "Delete All"]) + + # try restore bulk action + self.client.post(archived_url, {"action": "restore", "objects": billy.id}) + + response = self.client.get(archived_url) + self.assertEqual([frank, joe], list(response.context["object_list"])) + + billy.refresh_from_db() + self.assertEqual(Contact.STATUS_ACTIVE, billy.status) + + # try delete bulk action + self.client.post(archived_url, {"action": "delete", "objects": frank.id}) + + response = self.client.get(archived_url) + self.assertEqual([joe], list(response.context["object_list"])) + + frank.refresh_from_db() + self.assertFalse(frank.is_active) + + # the archived view also supports deleting all + self.client.post(archived_url, {"action": "delete", "all": "true"}) + + response = self.client.get(archived_url) + self.assertEqual([], list(response.context["object_list"])) + + # only archived contacts affected + self.assertEqual(2, Contact.objects.filter(is_active=False, status=Contact.STATUS_ARCHIVED).count()) + self.assertEqual(2, Contact.objects.filter(is_active=False).count()) + + # for larger numbers of contacts, a background task is used + for c in range(6): + contact = self.create_contact(f"Bob{c}", urns=[f"twitter:bob{c}"]) + contact.archive(self.user) + + response = self.client.get(archived_url) + self.assertEqual(6, len(response.context["object_list"])) + + self.client.post(archived_url, {"action": "delete", "all": "true"}) + + response = self.client.get(archived_url) + self.assertEqual(0, len(response.context["object_list"])) + + @mock_mailroom + def test_group(self, mr_mocks): + open_tickets = self.org.groups.get(name="Open Tickets") + joe = self.create_contact("Joe", phone="123") + frank = self.create_contact("Frank", phone="124") + self.create_contact("Bob", phone="125") + + mr_mocks.contact_search("age > 40", contacts=[frank], total=1) + + group1 = self.create_group("Testers", contacts=[joe, frank]) # static group + group2 = self.create_group("Oldies", query="age > 40") # smart group + group2.contacts.add(frank) + group3 = self.create_group("Other Org", org=self.org2) + + group1_url = reverse("contacts.contact_group", args=[group1.uuid]) + group2_url = reverse("contacts.contact_group", args=[group2.uuid]) + group3_url = reverse("contacts.contact_group", args=[group3.uuid]) + open_tickets_url = reverse("contacts.contact_group", args=[open_tickets.uuid]) + + self.assertRequestDisallowed(group1_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(group1_url, [self.user, self.editor, self.admin]) + + self.assertEqual([frank, joe], list(response.context["object_list"])) + self.assertEqual(["block", "unlabel", "send", "start-flow"], list(response.context["actions"])) + self.assertEqual( + [f.name for f in response.context["contact_fields"]], ["Home", "Age", "Last Seen On", "Created On"] + ) + + self.assertContentMenu( + group1_url, + self.admin, + ["Edit", "Export", "Usages", "Delete"], + ) + + response = self.assertReadFetch(group2_url, [self.editor]) + + self.assertEqual([frank], list(response.context["object_list"])) + self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) + self.assertContains(response, "age > 40") + + # can access system group like any other except no options to edit or delete + response = self.assertReadFetch(open_tickets_url, [self.editor]) + self.assertEqual([], list(response.context["object_list"])) + 
self.assertEqual(["block", "archive", "send", "start-flow"], list(response.context["actions"])) + self.assertContains(response, "tickets > 0") + self.assertContentMenu(open_tickets_url, self.admin, ["Export", "Usages"]) + + # if a user tries to access a non-existent group, that's a 404 + response = self.requestView(reverse("contacts.contact_group", args=["21343253"]), self.admin) + self.assertEqual(404, response.status_code) + + # if a user tries to access a group in another org, send them to the login page + response = self.requestView(group3_url, self.admin) + self.assertLoginRedirect(response) + + # if the user has access to that org, we redirect to the org choose page + self.org2.add_user(self.admin, OrgRole.ADMINISTRATOR) + response = self.requestView(group3_url, self.admin) + self.assertRedirect(response, "/org/choose/") + + @mock_mailroom + def test_read(self, mr_mocks): + joe = self.create_contact("Joe", phone="123") + + read_url = reverse("contacts.contact_read", args=[joe.uuid]) + + self.assertRequestDisallowed(read_url, [None, self.agent]) + + self.assertContentMenu(read_url, self.user, []) + self.assertContentMenu(read_url, self.editor, ["Edit", "Start Flow", "Open Ticket"]) + self.assertContentMenu(read_url, self.admin, ["Edit", "Start Flow", "Open Ticket"]) + + # if there's an open ticket already, don't show open ticket option + self.create_ticket(joe) + self.assertContentMenu(read_url, self.editor, ["Edit", "Start Flow"]) + + # login as viewer + self.login(self.user) + + response = self.client.get(read_url) + self.assertContains(response, "Joe") + + # login as admin + self.login(self.admin) + + response = self.client.get(read_url) + self.assertContains(response, "Joe") + self.assertEqual("/contact/active", response.headers[TEMBA_MENU_SELECTION]) + + # block the contact + joe.block(self.admin) + self.assertTrue(Contact.objects.get(pk=joe.id, status="B")) + + self.assertContentMenu(read_url, self.admin, ["Edit"]) + + response = self.client.get(read_url) + self.assertContains(response, "Joe") + self.assertEqual("/contact/blocked", response.headers[TEMBA_MENU_SELECTION]) + + # can't access a deleted contact + joe.release(self.admin) + + response = self.client.get(read_url) + self.assertEqual(response.status_code, 404) + + # contact with only a urn + nameless = self.create_contact("", urns=["twitter:bobby_anon"]) + response = self.client.get(reverse("contacts.contact_read", args=[nameless.uuid])) + self.assertContains(response, "bobby_anon") + + # contact without name or urn + nameless = Contact.objects.create(org=self.org) + response = self.client.get(reverse("contacts.contact_read", args=[nameless.uuid])) + self.assertContains(response, "Contact Details") + + # invalid uuid should return 404 + response = self.client.get(reverse("contacts.contact_read", args=["invalid-uuid"])) + self.assertEqual(response.status_code, 404) + + def test_history(self): + joe = self.create_contact(name="Joe Blow", urns=["twitter:blow80", "tel:+250781111111"]) + joe.created_on = timezone.now() - timedelta(days=1000) + joe.save(update_fields=("created_on",)) + kurt = self.create_contact("Kurt", phone="123123") + + history_url = reverse("contacts.contact_history", args=[joe.uuid]) + + self.create_broadcast(self.user, {"eng": {"text": "A beautiful broadcast"}}, contacts=[joe]) + self.create_campaign(joe) + + # add a message with some attachments + self.create_incoming_msg( + joe, + "Message caption", + created_on=timezone.now(), + attachments=[ + "audio/mp3:http://blah/file.mp3", + 
"video/mp4:http://blah/file.mp4", + "geo:47.5414799,-122.6359908", + ], + ) + + # create some messages + for i in range(94): + self.create_incoming_msg( + joe, "Inbound message %d" % i, created_on=timezone.now() - timedelta(days=(100 - i)) + ) + + # because messages are stored with timestamps from external systems, possible to have initial message + # which is little bit older than the contact itself + self.create_incoming_msg(joe, "Very old inbound message", created_on=joe.created_on - timedelta(seconds=10)) + + flow = self.get_flow("color_v13") + nodes = flow.get_definition()["nodes"] + color_prompt = nodes[0] + color_split = nodes[4] + + ( + MockSessionWriter(joe, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .call_webhook("POST", "https://example.com/", "1234") # pretend that flow run made a webhook request + .visit(color_split) + .set_result("Color", "green", "Green", "I like green") + .wait() + .save() + ) + ( + MockSessionWriter(kurt, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ) + + # mark an outgoing message as failed + failed = Msg.objects.filter(direction="O", contact=joe).last() + failed.status = "F" + failed.save(update_fields=("status",)) + + # create an airtime transfer + AirtimeTransfer.objects.create( + org=self.org, + status="S", + contact=joe, + currency="RWF", + desired_amount=Decimal("100"), + actual_amount=Decimal("100"), + ) + + # create an event from the past + scheduled = timezone.now() - timedelta(days=5) + EventFire.objects.create(event=self.planting_reminder, contact=joe, scheduled=scheduled, fired=scheduled) + + # two tickets for joe + sales = Topic.create(self.org, self.admin, "Sales") + self.create_ticket(joe, opened_on=timezone.now(), closed_on=timezone.now()) + ticket = self.create_ticket(joe, topic=sales) + + # create missed incoming and outgoing calls + self.create_channel_event( + self.channel, str(joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_CALL_OUT_MISSED, extra={} + ) + self.create_channel_event( + self.channel, str(joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_CALL_IN_MISSED, extra={} + ) + + # and a referral event + self.create_channel_event( + self.channel, str(joe.get_urn(URN.TEL_SCHEME)), ChannelEvent.TYPE_NEW_CONVERSATION, extra={} + ) + + # add a failed call + Call.objects.create( + contact=joe, + status=Call.STATUS_ERRORED, + error_reason=Call.ERROR_NOANSWER, + channel=self.channel, + org=self.org, + contact_urn=joe.urns.all().first(), + error_count=0, + ) + + # add a note to our open ticket + ticket.events.create( + org=self.org, + contact=ticket.contact, + event_type="N", + note="I have a bad feeling about this", + created_by=self.admin, + ) + + # create an assignment + ticket.events.create( + org=self.org, + contact=ticket.contact, + event_type="A", + created_by=self.admin, + assignee=self.admin, + ) + + # set an output URL on our session so we fetch from there + s = FlowSession.objects.get(contact=joe) + s3.client().put_object( + Bucket="test-sessions", Key="c/session.json", Body=io.BytesIO(json.dumps(s.output).encode()) + ) + FlowSession.objects.filter(id=s.id).update(output_url="http://minio:9000/test-sessions/c/session.json") + + # fetch our contact history + self.login(self.admin) + with self.assertNumQueries(25): + response = self.client.get(history_url + "?limit=100") + + # history should include all messages in the last 90 days, the channel event, the call, and the flow run + history = 
response.json()["events"] + self.assertEqual(96, len(history)) + + def assertHistoryEvent(events, index, expected_type, **kwargs): + item = events[index] + self.assertEqual(expected_type, item["type"], f"event type mismatch for item {index}") + self.assertTrue(iso8601.parse_date(item["created_on"])) # check created_on exists and is ISO string + + for path, expected in kwargs.items(): + self.assertPathValue(item, path, expected, f"item {index}") + + assertHistoryEvent(history, 0, "call_started", status="E", status_display="Errored (No Answer)") + assertHistoryEvent(history, 1, "channel_event", channel_event_type="new_conversation") + assertHistoryEvent(history, 2, "channel_event", channel_event_type="mo_miss") + assertHistoryEvent(history, 3, "channel_event", channel_event_type="mt_miss") + assertHistoryEvent(history, 4, "ticket_opened", ticket__topic__name="Sales") + assertHistoryEvent(history, 5, "ticket_closed", ticket__topic__name="General") + assertHistoryEvent(history, 6, "ticket_opened", ticket__topic__name="General") + assertHistoryEvent(history, 7, "airtime_transferred", actual_amount="100.00") + assertHistoryEvent(history, 8, "msg_created", msg__text="What is your favorite color?") + assertHistoryEvent(history, 9, "flow_entered", flow__name="Colors") + assertHistoryEvent(history, 10, "msg_received", msg__text="Message caption") + assertHistoryEvent( + history, 11, "msg_created", msg__text="A beautiful broadcast", created_by__email="viewer@textit.com" + ) + assertHistoryEvent(history, 12, "campaign_fired", campaign__name="Planting Reminders") + assertHistoryEvent(history, -1, "msg_received", msg__text="Inbound message 11") + + # revert back to reading only from DB + FlowSession.objects.filter(id=s.id).update(output_url=None) + + # can filter by ticket to only all ticket events from that ticket rather than some events from all tickets + response = self.client.get(history_url + f"?ticket={ticket.uuid}&limit=100") + history = response.json()["events"] + assertHistoryEvent(history, 0, "ticket_assigned", assignee__id=self.admin.id) + assertHistoryEvent(history, 1, "ticket_note_added", note="I have a bad feeling about this") + assertHistoryEvent(history, 5, "channel_event", channel_event_type="mt_miss") + assertHistoryEvent(history, 6, "ticket_opened", ticket__topic__name="Sales") + assertHistoryEvent(history, 7, "airtime_transferred", actual_amount="100.00") + + # fetch next page + before = datetime_to_timestamp(timezone.now() - timedelta(days=90)) + response = self.requestView(history_url + "?limit=100&before=%d" % before, self.admin) + self.assertFalse(response.json()["has_older"]) + + # activity should include 11 remaining messages and the event fire + history = response.json()["events"] + self.assertEqual(12, len(history)) + assertHistoryEvent(history, 0, "msg_received", msg__text="Inbound message 10") + assertHistoryEvent(history, 10, "msg_received", msg__text="Inbound message 0") + assertHistoryEvent(history, 11, "msg_received", msg__text="Very old inbound message") + + response = self.requestView(history_url + "?limit=100", self.admin) + history = response.json()["events"] + + self.assertEqual(96, len(history)) + assertHistoryEvent(history, 8, "msg_created", msg__text="What is your favorite color?") + + # if a new message comes in + self.create_incoming_msg(joe, "Newer message") + response = self.requestView(history_url, self.admin) + + # now we'll see the message that just came in first, followed by the call event + history = response.json()["events"] + 
assertHistoryEvent(history, 0, "msg_received", msg__text="Newer message") + assertHistoryEvent(history, 1, "call_started", status="E", status_display="Errored (No Answer)") + + recent_start = datetime_to_timestamp(timezone.now() - timedelta(days=1)) + response = self.requestView(history_url + "?limit=100&after=%s" % recent_start, self.admin) + + # with our recent flag on, should not see the older messages + events = response.json()["events"] + self.assertEqual(13, len(events)) + self.assertContains(response, "file.mp4") + + # add a new run + ( + MockSessionWriter(joe, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ) + + response = self.requestView(history_url + "?limit=200", self.admin) + history = response.json()["events"] + self.assertEqual(100, len(history)) + + # before date should not match our last activity, that only happens when we truncate + resp_json = response.json() + self.assertNotEqual( + resp_json["next_before"], + datetime_to_timestamp(iso8601.parse_date(resp_json["events"][-1]["created_on"])), + ) + + assertHistoryEvent(history, 0, "msg_created", msg__text="What is your favorite color?") + assertHistoryEvent(history, 1, "flow_entered") + assertHistoryEvent(history, 2, "flow_exited") + assertHistoryEvent(history, 3, "msg_received", msg__text="Newer message") + assertHistoryEvent(history, 4, "call_started") + assertHistoryEvent(history, 5, "channel_event") + assertHistoryEvent(history, 6, "channel_event") + assertHistoryEvent(history, 7, "channel_event") + assertHistoryEvent(history, 8, "ticket_opened") + assertHistoryEvent(history, 9, "ticket_closed") + assertHistoryEvent(history, 10, "ticket_opened") + assertHistoryEvent(history, 11, "airtime_transferred") + assertHistoryEvent(history, 12, "msg_created", msg__text="What is your favorite color?") + assertHistoryEvent(history, 13, "flow_entered") + + # make our message event older than our planting reminder + self.message_event.created_on = self.planting_reminder.created_on - timedelta(days=1) + self.message_event.save() + + # but fire it immediately + scheduled = timezone.now() + EventFire.objects.create(event=self.message_event, contact=joe, scheduled=scheduled, fired=scheduled) + + # when fetched with limit of 1, it should be the only event we see + response = self.requestView( + history_url + "?limit=1&before=%d" % datetime_to_timestamp(scheduled + timedelta(minutes=5)), self.admin + ) + assertHistoryEvent(response.json()["events"], 0, "campaign_fired", campaign_event__id=self.message_event.id) + + # now try the proper max history to test truncation + response = self.requestView(history_url + "?before=%d" % datetime_to_timestamp(timezone.now()), self.admin) + + # our before should be the same as the last item + resp_json = response.json() + last_item_date = datetime_to_timestamp(iso8601.parse_date(resp_json["events"][-1]["created_on"])) + self.assertEqual(resp_json["next_before"], last_item_date) + + # and our after should be 90 days earlier + self.assertEqual(resp_json["next_after"], last_item_date - (90 * 24 * 60 * 60 * 1000 * 1000)) + self.assertEqual(50, len(resp_json["events"])) + + # and we should have a marker for older items + self.assertTrue(resp_json["has_older"]) + + # can't view history of contact in other org + other_org_contact = self.create_contact("Fred", phone="+250768111222", org=self.org2) + response = self.client.get(reverse("contacts.contact_history", args=[other_org_contact.uuid])) + 
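# contacts in other workspaces are treated as non-existent
+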
self.assertEqual(response.status_code, 404) + + # invalid UUID should return 404 + response = self.client.get(reverse("contacts.contact_history", args=["837d0842-4f6b-4751-bf21-471df75ce786"])) + self.assertEqual(response.status_code, 404) + + def test_history_session_events(self): + joe = self.create_contact(name="Joe Blow", urns=["twitter:blow80", "tel:+250781111111"]) + + history_url = reverse("contacts.contact_history", args=[joe.uuid]) + + flow = self.get_flow("color_v13") + nodes = flow.get_definition()["nodes"] + ( + MockSessionWriter(joe, flow) + .visit(nodes[0]) + .add_contact_urn("twitter", "joey") + .set_contact_field("gender", "Gender", "M") + .set_contact_field("age", "Age", "") + .set_contact_language("spa") + .set_contact_language("") + .set_contact_name("Joe") + .set_contact_name("") + .set_result("Color", "red", "Red", "it's red") + .send_email(["joe@textit.com"], "Test", "Hello there Joe") + .error("unable to send email") + .fail("this is a failure") + .save() + ) + + self.login(self.user) + + response = self.client.get(history_url) + self.assertEqual(200, response.status_code) + + resp_json = response.json() + self.assertEqual(9, len(resp_json["events"])) + self.assertEqual( + [ + "flow_exited", + "contact_name_changed", + "contact_name_changed", + "contact_language_changed", + "contact_language_changed", + "contact_field_changed", + "contact_field_changed", + "contact_urns_changed", + "flow_entered", + ], + [e["type"] for e in resp_json["events"]], + ) + + @mock_mailroom + def test_update(self, mr_mocks): + self.org.flow_languages = ["eng", "spa"] + self.org.save(update_fields=("flow_languages",)) + + self.create_field("gender", "Gender", value_type=ContactField.TYPE_TEXT) + contact = self.create_contact( + "Bob", + urns=["tel:+593979111111", "tel:+593979222222", "telegram:5474754"], + fields={"age": 41, "gender": "M"}, + language="eng", + ) + testers = self.create_group("Testers", contacts=[contact]) + self.create_contact("Ann", urns=["tel:+593979444444"]) + + update_url = reverse("contacts.contact_update", args=[contact.id]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={ + "name": "Bob", + "status": "A", + "language": "eng", + "groups": [testers], + "new_scheme": None, + "new_path": None, + "urn__tel__0": "+593979111111", + "urn__tel__1": "+593979222222", + "urn__telegram__2": "5474754", + }, + ) + + # try to take URN in use by another contact + mr_mocks.contact_urns({"tel:+593979444444": 12345678}) + + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Bobby", "status": "B", "language": "spa", "groups": [testers.id], "urn__tel__0": "+593979444444"}, + form_errors={"urn__tel__0": "In use by another contact."}, + object_unchanged=contact, + ) + + # try to update to an invalid URN + mr_mocks.contact_urns({"tel:++++": "invalid path component"}) + + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Bobby", "status": "B", "language": "spa", "groups": [testers.id], "urn__tel__0": "++++"}, + form_errors={"urn__tel__0": "Invalid format."}, + object_unchanged=contact, + ) + + # try to add a new invalid phone URN + mr_mocks.contact_urns({"tel:123": "not a valid phone number"}) + + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "Bobby", + "status": "B", + "language": "spa", + "groups": [testers.id], + "urn__tel__0": "+593979111111", + "new_scheme": "tel", + "new_path": "123", + }, + 
form_errors={"new_path": "Invalid format."}, + object_unchanged=contact, + ) + + # try to add a new phone URN that isn't E164 + mr_mocks.contact_urns({"tel:123": False}) + + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "Bobby", + "status": "B", + "language": "spa", + "groups": [testers.id], + "urn__tel__0": "+593979111111", + "new_scheme": "tel", + "new_path": "123", + }, + form_errors={"new_path": "Invalid phone number. Ensure number includes country code."}, + object_unchanged=contact, + ) + + # update all fields (removes second tel URN, adds a new Facebook URN) + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "Bobby", + "status": "B", + "language": "spa", + "groups": [testers.id], + "urn__tel__0": "+593979333333", + "urn__telegram__2": "78686776", + "new_scheme": "facebook", + "new_path": "9898989", + }, + success_status=200, + ) + + contact.refresh_from_db() + self.assertEqual("Bobby", contact.name) + self.assertEqual(Contact.STATUS_BLOCKED, contact.status) + self.assertEqual("spa", contact.language) + self.assertEqual({testers}, set(contact.get_groups())) + self.assertEqual( + ["tel:+593979333333", "telegram:78686776", "facebook:9898989"], + [u.identity for u in contact.urns.order_by("-priority")], + ) + + # for non-active contacts, shouldn't see groups on form + self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={ + "name": "Bobby", + "status": "B", + "language": "spa", + "new_scheme": None, + "new_path": None, + "urn__tel__0": "+593979333333", + "urn__telegram__1": "78686776", + "urn__facebook__2": "9898989", + }, + ) + + # try to update with invalid URNs + mr_mocks.contact_urns({"tel:456": "invalid path component", "facebook:xxxxx": "invalid path component"}) + + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "Bobby", + "status": "B", + "language": "spa", + "groups": [], + "urn__tel__0": "456", + "urn__facebook__2": "xxxxx", + }, + form_errors={ + "urn__tel__0": "Invalid format.", + "urn__facebook__2": "Invalid format.", + }, + object_unchanged=contact, + ) + + # if contact has a language which is no longer a flow language, it should still be a valid option on the form + contact.language = "kin" + contact.save(update_fields=("language",)) + + response = self.assertUpdateFetch( + update_url, + [self.admin], + form_fields={ + "name": "Bobby", + "status": "B", + "language": "kin", + "new_scheme": None, + "new_path": None, + "urn__tel__0": "+593979333333", + "urn__telegram__1": "78686776", + "urn__facebook__2": "9898989", + }, + ) + self.assertContains(response, "Kinyarwanda") + + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "Bobby", + "status": "A", + "language": "kin", + "urn__tel__0": "+593979333333", + "urn__telegram__1": "78686776", + "urn__facebook__2": "9898989", + }, + success_status=200, + ) + + contact.refresh_from_db() + self.assertEqual("Bobby", contact.name) + self.assertEqual(Contact.STATUS_ACTIVE, contact.status) + self.assertEqual("kin", contact.language) + + def test_update_urns_field(self): + contact = self.create_contact("Bob", urns=[]) + + update_url = reverse("contacts.contact_update", args=[contact.id]) + + # we have a field to add new urns + response = self.requestView(update_url, self.admin) + self.assertContains(response, "Add Connection") + + # no field to add new urns for anon org + with self.anonymous(self.org): + response = self.requestView(update_url, self.admin) + self.assertNotContains(response, "Add Connection") + + @mock_mailroom + def 
test_update_with_mailroom_error(self, mr_mocks): + mr_mocks.exception(mailroom.RequestException("", "", MockResponse(400, '{"error": "Error updating contact"}'))) + + contact = self.create_contact("Joe", phone="1234") + + self.login(self.admin) + + response = self.client.post( + reverse("contacts.contact_update", args=[contact.id]), + {"name": "Joe", "status": Contact.STATUS_ACTIVE, "language": "eng"}, + ) + + self.assertFormError( + response.context["form"], None, "An error occurred updating your contact. Please try again later." + ) + + @mock_mailroom + def test_export(self, mr_mocks): + export_url = reverse("contacts.contact_export") + + self.assertRequestDisallowed(export_url, [None, self.agent]) + response = self.assertUpdateFetch(export_url, [self.editor, self.admin], form_fields=("with_groups",)) + self.assertNotContains(response, "already an export in progress") + + # create a dummy export task so that we won't be able to export + blocking_export = ContactExport.create(self.org, self.admin) + + response = self.client.get(export_url) + self.assertContains(response, "already an export in progress") + + # check we can't submit in case a user opens the form while another user is starting an export + response = self.client.post(export_url, {}) + self.assertContains(response, "already an export in progress") + self.assertEqual(1, Export.objects.count()) + + # mark that one as finished so it's no longer a blocker + blocking_export.status = Export.STATUS_COMPLETE + blocking_export.save(update_fields=("status",)) + + # try to export a group that is too big + big_group = self.create_group("Big Group", contacts=[]) + mr_mocks.contact_export_preview(1_000_123) + + response = self.client.get(export_url + f"?g={big_group.uuid}") + self.assertContains(response, "This group or search is too large to export.") + + response = self.client.post( + export_url + f"?g={self.org.active_contacts_group.uuid}", {"with_groups": [big_group.id]} + ) + self.assertEqual(200, response.status_code) + + export = Export.objects.exclude(id=blocking_export.id).get() + self.assertEqual("contact", export.export_type) + self.assertEqual( + {"group_id": self.org.active_contacts_group.id, "search": None, "with_groups": [big_group.id]}, + export.config, + ) + + def test_scheduled(self): + contact1 = self.create_contact("Joe", phone="+1234567890") + contact2 = self.create_contact("Frank", phone="+1204567802") + farmers = self.create_group("Farmers", contacts=[contact1, contact2]) + + schedule_url = reverse("contacts.contact_scheduled", args=[contact1.uuid]) + + self.assertRequestDisallowed(schedule_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(schedule_url, [self.user, self.editor, self.admin]) + self.assertEqual({"results": []}, response.json()) + + # create a campaign and event fires for this contact + campaign = Campaign.create(self.org, self.admin, "Reminders", farmers) + joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) + event2_flow = self.create_flow("Reminder Flow") + event1 = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") + event2 = CampaignEvent.create_flow_event(self.org, self.admin, campaign, joined, 2, unit="D", flow=event2_flow) + fire1 = EventFire.objects.create(event=event1, contact=contact1, scheduled=timezone.now() + timedelta(days=2)) + fire2 = EventFire.objects.create(event=event2, contact=contact1, scheduled=timezone.now() + timedelta(days=5)) + + # create scheduled and regular 
broadcasts which send to both contacts + bcast1 = self.create_broadcast( + self.admin, + {"eng": {"text": "Hi again"}}, + contacts=[contact1, contact2], + schedule=Schedule.create(self.org, timezone.now() + timedelta(days=3), Schedule.REPEAT_DAILY), + ) + self.create_broadcast(self.admin, {"eng": {"text": "Bye"}}, contacts=[contact1, contact2]) # not scheduled + + # create scheduled trigger which this contact is explicitly added to + trigger1_flow = self.create_flow("Favorites 1") + trigger1 = Trigger.create( + self.org, + self.admin, + trigger_type=Trigger.TYPE_SCHEDULE, + flow=trigger1_flow, + schedule=Schedule.create(self.org, timezone.now() + timedelta(days=4), Schedule.REPEAT_WEEKLY), + ) + trigger1.contacts.add(contact1, contact2) + + # create scheduled trigger which this contact is added to via a group + trigger2_flow = self.create_flow("Favorites 2") + trigger2 = Trigger.create( + self.org, + self.admin, + trigger_type=Trigger.TYPE_SCHEDULE, + flow=trigger2_flow, + schedule=Schedule.create(self.org, timezone.now() + timedelta(days=6), Schedule.REPEAT_MONTHLY), + ) + trigger2.groups.add(farmers) + + # create scheduled trigger which this contact is explicitly added to... but also excluded from + trigger3 = Trigger.create( + self.org, + self.admin, + trigger_type=Trigger.TYPE_SCHEDULE, + flow=self.create_flow("Favorites 3"), + schedule=Schedule.create(self.org, timezone.now() + timedelta(days=4), Schedule.REPEAT_WEEKLY), + ) + trigger3.contacts.add(contact1, contact2) + trigger3.exclude_groups.add(farmers) + + response = self.requestView(schedule_url, self.admin) + self.assertEqual( + { + "results": [ + { + "type": "campaign_event", + "scheduled": fire1.scheduled.isoformat(), + "repeat_period": None, + "campaign": {"uuid": str(campaign.uuid), "name": "Reminders"}, + "message": "Hi", + }, + { + "type": "scheduled_broadcast", + "scheduled": bcast1.schedule.next_fire.astimezone(tzone.utc).isoformat(), + "repeat_period": "D", + "message": "Hi again", + }, + { + "type": "scheduled_trigger", + "scheduled": trigger1.schedule.next_fire.astimezone(tzone.utc).isoformat(), + "repeat_period": "W", + "flow": {"uuid": str(trigger1_flow.uuid), "name": "Favorites 1"}, + }, + { + "type": "campaign_event", + "scheduled": fire2.scheduled.isoformat(), + "repeat_period": None, + "campaign": {"uuid": str(campaign.uuid), "name": "Reminders"}, + "flow": {"uuid": str(event2_flow.uuid), "name": "Reminder Flow"}, + }, + { + "type": "scheduled_trigger", + "scheduled": trigger2.schedule.next_fire.astimezone(tzone.utc).isoformat(), + "repeat_period": "M", + "flow": {"uuid": str(trigger2_flow.uuid), "name": "Favorites 2"}, + }, + ] + }, + response.json(), + ) + + # fires for archived campaigns shouldn't appear + campaign.archive(self.admin) + + response = self.requestView(schedule_url, self.admin) + self.assertEqual(3, len(response.json()["results"])) + + @mock_mailroom + def test_open_ticket(self, mr_mocks): + contact = self.create_contact("Joe", phone="+593979000111") + general = self.org.default_ticket_topic + open_url = reverse("contacts.contact_open_ticket", args=[contact.id]) + + self.assertRequestDisallowed(open_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch(open_url, [self.editor, self.admin], form_fields=("topic", "assignee", "note")) + + # can submit with no assignee + response = self.assertUpdateSubmit(open_url, self.admin, {"topic": general.id, "note": "Help", "assignee": ""}) + + # should have new ticket + ticket = contact.tickets.get() + self.assertEqual(general, ticket.topic) 
+ self.assertIsNone(ticket.assignee) + + # and we're redirected to that ticket + self.assertRedirect(response, f"/ticket/all/open/{ticket.uuid}/") + + @mock_mailroom + def test_interrupt(self, mr_mocks): + contact = self.create_contact("Joe", phone="+593979000111") + other_org_contact = self.create_contact("Hans", phone="+593979123456", org=self.org2) + + read_url = reverse("contacts.contact_read", args=[contact.uuid]) + interrupt_url = reverse("contacts.contact_interrupt", args=[contact.uuid]) + + self.login(self.admin) + + # should see start flow option + response = self.client.get(read_url) + self.assertContentMenu(read_url, self.admin, ["Edit", "Start Flow", "Open Ticket"]) + + MockSessionWriter(contact, self.create_flow("Test")).wait().save() + MockSessionWriter(other_org_contact, self.create_flow("Test", org=self.org2)).wait().save() + + # start option should be gone + self.assertContentMenu(read_url, self.admin, ["Edit", "Open Ticket"]) + + # can't interrupt if not logged in + self.client.logout() + response = self.client.post(interrupt_url) + self.assertLoginRedirect(response) + + self.login(self.user) + + # can't interrupt if just regular user + response = self.client.post(interrupt_url) + self.assertLoginRedirect(response) + + self.login(self.admin) + + response = self.client.post(interrupt_url) + self.assertEqual(302, response.status_code) + + contact.refresh_from_db() + self.assertIsNone(contact.current_flow) + + # can't interrupt contact in other org + other_contact_interrupt = reverse("contacts.contact_interrupt", args=[other_org_contact.uuid]) + response = self.client.post(other_contact_interrupt) + self.assertLoginRedirect(response) + + # contact should be unchanged + other_org_contact.refresh_from_db() + self.assertIsNotNone(other_org_contact.current_flow) + + @mock_mailroom + def test_delete(self, mr_mocks): + contact = self.create_contact("Joe", phone="+593979000111") + other_org_contact = self.create_contact("Hans", phone="+593979123456", org=self.org2) + + delete_url = reverse("contacts.contact_delete", args=[contact.id]) + + # can't delete if not logged in + response = self.client.post(delete_url, {"id": contact.id}) + self.assertLoginRedirect(response) + + self.login(self.user) + + # can't delete if just regular user + response = self.client.post(delete_url, {"id": contact.id}) + self.assertLoginRedirect(response) + + self.login(self.admin) + + response = self.client.post(delete_url, {"id": contact.id}) + self.assertEqual(302, response.status_code) + + contact.refresh_from_db() + self.assertFalse(contact.is_active) + + self.assertEqual([call(self.org, [contact])], mr_mocks.calls["contact_deindex"]) + + # can't delete contact in other org + delete_url = reverse("contacts.contact_delete", args=[other_org_contact.id]) + response = self.client.post(delete_url, {"id": other_org_contact.id}) + self.assertLoginRedirect(response) + + # contact should be unchanged + other_org_contact.refresh_from_db() + self.assertTrue(other_org_contact.is_active) + + @mock_mailroom + def test_start(self, mr_mocks): + sample_flows = list(self.org.flows.order_by("name")) + background_flow = self.create_flow("Background") + archived_flow = self.create_flow("Archived") + archived_flow.archive(self.admin) + + contact = self.create_contact("Joe", phone="+593979000111") + start_url = f"{reverse('flows.flow_start', args=[])}?flow={sample_flows[0].id}&c={contact.uuid}" + + self.assertRequestDisallowed(start_url, [None, self.user, self.agent]) + response = self.assertUpdateFetch(start_url, 
[self.editor, self.admin], form_fields=["flow", "contact_search"]) + + self.assertEqual([background_flow] + sample_flows, list(response.context["form"].fields["flow"].queryset)) + + # try to submit without specifying a flow + self.assertUpdateSubmit( + start_url, + self.admin, + data={}, + form_errors={"flow": "This field is required.", "contact_search": "This field is required."}, + object_unchanged=contact, + ) + + # submit with flow... + contact_search = dict(query=f"uuid='{contact.uuid}'", advanced=True) + self.assertUpdateSubmit( + start_url, self.admin, {"flow": background_flow.id, "contact_search": json.dumps(contact_search)} + ) + + # should now have a flow start + start = FlowStart.objects.get() + self.assertEqual(background_flow, start.flow) + self.assertEqual(contact_search["query"], start.query) + self.assertEqual({}, start.exclusions) + + # that has been queued to mailroom + self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[-1]["type"]) diff --git a/temba/contacts/tests/test_export.py b/temba/contacts/tests/test_export.py new file mode 100644 index 00000000000..4f6ec00a472 --- /dev/null +++ b/temba/contacts/tests/test_export.py @@ -0,0 +1,622 @@ +import tempfile +from datetime import datetime, timezone as tzone + +from openpyxl import load_workbook + +from django.core.files.storage import default_storage + +from temba.contacts.models import Contact, ContactExport, ContactField, ContactGroup, ContactURN +from temba.orgs.models import Export +from temba.tests import TembaTest, mock_mailroom +from temba.tests.engine import MockSessionWriter + + +class ContactExportTest(TembaTest): + def setUp(self): + super().setUp() + + self.joe = self.create_contact(name="Joe Blow", phone="123") + self.frank = self.create_contact(name="Frank Smith", phone="1234") + + self.contactfield_1 = self.create_field("first", "First", priority=10) + self.contactfield_2 = self.create_field("second", "Second") + self.contactfield_3 = self.create_field("third", "Third", priority=20) + + def _export(self, group, search="", with_groups=()): + export = ContactExport.create(self.org, self.admin, group, search, with_groups=with_groups) + with self.mockReadOnly(assert_models={Contact, ContactURN, ContactField}): + export.perform() + + workbook = load_workbook( + filename=default_storage.open(f"orgs/{self.org.id}/contact_exports/{export.uuid}.xlsx") + ) + return workbook.worksheets, export + + @mock_mailroom + def test_export(self, mr_mocks): + # archive all our current contacts + Contact.apply_action_block(self.admin, self.org.contacts.all()) + + # make third a datetime + self.contactfield_3.value_type = ContactField.TYPE_DATETIME + self.contactfield_3.save() + + # start one of our contacts down it + contact = self.create_contact( + "Be\02n Haggerty", + phone="+12067799294", + fields={"first": "On\02e", "third": "20/12/2015 08:30"}, + last_seen_on=datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + ) + + flow = self.get_flow("color_v13") + nodes = flow.get_definition()["nodes"] + color_prompt = nodes[0] + color_split = nodes[4] + + ( + MockSessionWriter(self.joe, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ) + + # create another contact, this should sort before Ben + contact2 = self.create_contact("Adam Sumner", urns=["tel:+12067799191", "twitter:adam"], language="eng") + urns = [str(urn) for urn in contact2.get_urns()] + urns.append("mailto:adam@sumner.com") + urns.append("telegram:1234") + 
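+ # update_urns() builds a modifier which modify() then applies (assumption: under @mock_mailroom the modifier is applied locally rather than via a real mailroom call)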
contact2.modify(self.admin, contact2.update_urns(urns)) + + group1 = self.create_group("Poppin Tags", [contact, contact2]) + group2 = self.create_group("Dynamic", query="tel is 1234") + group2.status = ContactGroup.STATUS_EVALUATING + group2.save() + + # create orphaned URN in scheme that no contacts have a URN for + ContactURN.objects.create(org=self.org, identity="line:12345", scheme="line", path="12345") + + def assertReimport(export): + with default_storage.open(f"orgs/{self.org.id}/contact_exports/{export.uuid}.xlsx") as exp: + with tempfile.NamedTemporaryFile(delete=False) as tmp: + tmp.write(exp.read()) + tmp.close() + + self.create_contact_import(tmp.name) + + with self.assertNumQueries(22): + sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) + self.assertEqual(2, export.num_records) + self.assertEqual("C", export.status) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:First", + "Field:Second", + "Group:Poppin Tags", + ], + [ + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "", + "+12067799294", + "", + "", + "20-12-2015 08:30", + "One", + "", + True, + ], + [ + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "adam@sumner.com", + "+12067799191", + "1234", + "adam", + "", + "", + "", + True, + ], + ], + tz=self.org.timezone, + ) + + assertReimport(export) + + # check that notifications were created + export = Export.objects.filter(export_type=ContactExport.slug).order_by("id").last() + self.assertEqual(1, self.admin.notifications.filter(notification_type="export:finished", export=export).count()) + + # change the order of the fields + self.contactfield_2.priority = 15 + self.contactfield_2.save() + + with self.assertNumQueries(21): + sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) + self.assertEqual(2, export.num_records) + self.assertEqual("C", export.status) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "", + "+12067799294", + "", + "", + "20-12-2015 08:30", + "", + "One", + True, + ], + [ + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "adam@sumner.com", + "+12067799191", + "1234", + "adam", + "", + "", + "", + True, + ], + ], + tz=self.org.timezone, + ) + + assertReimport(export) + + # more contacts do not increase the queries + contact3 = self.create_contact("Luol Deng", urns=["tel:+12078776655", "twitter:deng"]) + contact4 = self.create_contact("Stephen", urns=["tel:+12078778899", "twitter:stephen"]) + contact.urns.create(org=self.org, identity="tel:+12062233445", scheme="tel", path="+12062233445") + + # but should have additional Twitter and phone columns + with self.assertNumQueries(21): + sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) + self.assertEqual(4, export.num_records) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + 
"Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "", + "+12067799294", + "+12062233445", + "", + "", + "20-12-2015 08:30", + "", + "One", + True, + ], + [ + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "adam@sumner.com", + "+12067799191", + "", + "1234", + "adam", + "", + "", + "", + True, + ], + [ + contact3.uuid, + "Luol Deng", + "", + "Active", + contact3.created_on, + "", + "", + "+12078776655", + "", + "", + "deng", + "", + "", + "", + False, + ], + [ + contact4.uuid, + "Stephen", + "", + "Active", + contact4.created_on, + "", + "", + "+12078778899", + "", + "", + "stephen", + "", + "", + "", + False, + ], + ], + tz=self.org.timezone, + ) + + assertReimport(export) + + # export a specified group of contacts (only Ben and Adam are in the group) + with self.assertNumQueries(21): + sheets, export = self._export(group1, with_groups=[group1]) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "", + "+12067799294", + "+12062233445", + "", + "", + "20-12-2015 08:30", + "", + "One", + True, + ], + [ + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "adam@sumner.com", + "+12067799191", + "", + "1234", + "adam", + "", + "", + "", + True, + ], + ], + tz=self.org.timezone, + ) + + assertReimport(export) + + contact5 = self.create_contact("George", urns=["tel:+1234567777"], status=Contact.STATUS_STOPPED) + + # export a specified status group of contacts (Stopped) + sheets, export = self._export(self.org.groups.get(group_type="S"), with_groups=[group1]) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact5.uuid, + "George", + "", + "Stopped", + contact5.created_on, + "", + "", + "1234567777", + "", + "", + "", + "", + "", + "", + False, + ], + ], + tz=self.org.timezone, + ) + + # export a search + mr_mocks.contact_export([contact2.id, contact3.id]) + with self.assertNumQueries(22): + sheets, export = self._export( + self.org.active_contacts_group, "name has adam or name has deng", with_groups=[group1] + ) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "adam@sumner.com", + "+12067799191", + "", + "1234", + "adam", + "", + "", + "", + True, + ], + [ + contact3.uuid, + "Luol Deng", + "", + "Active", + contact3.created_on, + "", + "", + "+12078776655", + "", + "", + "deng", + "", + "", + "", + False, + ], + ], + 
tz=self.org.timezone, + ) + + assertReimport(export) + + # export a search within a specified group of contacts + mr_mocks.contact_export([contact.id]) + with self.assertNumQueries(20): + sheets, export = self._export(group1, search="Hagg", with_groups=[group1]) + self.assertExcelSheet( + sheets[0], + [ + [ + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "URN:Mailto", + "URN:Tel", + "URN:Tel", + "URN:Telegram", + "URN:Twitter", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "", + "+12067799294", + "+12062233445", + "", + "", + "20-12-2015 08:30", + "", + "One", + True, + ], + ], + tz=self.org.timezone, + ) + + assertReimport(export) + + # now try with an anonymous org + with self.anonymous(self.org): + sheets, export = self._export(self.org.active_contacts_group, with_groups=[group1]) + self.assertExcelSheet( + sheets[0], + [ + [ + "ID", + "Scheme", + "Contact UUID", + "Name", + "Language", + "Status", + "Created On", + "Last Seen On", + "Field:Third", + "Field:Second", + "Field:First", + "Group:Poppin Tags", + ], + [ + str(contact.id), + "tel", + contact.uuid, + "Ben Haggerty", + "", + "Active", + contact.created_on, + datetime(2020, 1, 1, 12, 0, 0, 0, tzinfo=tzone.utc), + "20-12-2015 08:30", + "", + "One", + True, + ], + [ + str(contact2.id), + "tel", + contact2.uuid, + "Adam Sumner", + "eng", + "Active", + contact2.created_on, + "", + "", + "", + "", + True, + ], + [ + str(contact3.id), + "tel", + contact3.uuid, + "Luol Deng", + "", + "Active", + contact3.created_on, + "", + "", + "", + "", + False, + ], + [ + str(contact4.id), + "tel", + contact4.uuid, + "Stephen", + "", + "Active", + contact4.created_on, + "", + "", + "", + "", + False, + ], + ], + tz=self.org.timezone, + ) + assertReimport(export) diff --git a/temba/contacts/tests/test_field.py b/temba/contacts/tests/test_field.py new file mode 100644 index 00000000000..4ff2b0cca64 --- /dev/null +++ b/temba/contacts/tests/test_field.py @@ -0,0 +1,185 @@ +from django.urls import reverse + +from temba.contacts.models import ContactField +from temba.tests import TembaTest, mock_mailroom +from temba.utils import json + + +class ContactFieldTest(TembaTest): + def setUp(self): + super().setUp() + + self.joe = self.create_contact(name="Joe Blow", phone="123") + self.frank = self.create_contact(name="Frank Smith", phone="1234") + + self.contactfield_1 = self.create_field("first", "First", priority=10) + self.contactfield_2 = self.create_field("second", "Second") + self.contactfield_3 = self.create_field("third", "Third", priority=20) + + self.other_org_field = self.create_field("other", "Other", priority=10, org=self.org2) + + def test_get_or_create(self): + # name can be generated + field1 = ContactField.get_or_create(self.org, self.admin, "join_date") + self.assertEqual("join_date", field1.key) + self.assertEqual("Join Date", field1.name) + self.assertEqual(ContactField.TYPE_TEXT, field1.value_type) + self.assertFalse(field1.is_system) + + # or passed explicitly along with type + field2 = ContactField.get_or_create( + self.org, self.admin, "another", name="My Label", value_type=ContactField.TYPE_NUMBER + ) + self.assertEqual("another", field2.key) + self.assertEqual("My Label", field2.name) + self.assertEqual(ContactField.TYPE_NUMBER, field2.value_type) + + # if there's an existing key with this key we get that with name and type 
updated + field3 = ContactField.get_or_create( + self.org, self.admin, "another", name="Updated Label", value_type=ContactField.TYPE_DATETIME + ) + self.assertEqual(field2, field3) + self.assertEqual("another", field3.key) + self.assertEqual("Updated Label", field3.name) + self.assertEqual(ContactField.TYPE_DATETIME, field3.value_type) + + field4 = ContactField.get_or_create(self.org, self.admin, "another", name="Updated Again Label") + self.assertEqual(field3, field4) + self.assertEqual("another", field4.key) + self.assertEqual("Updated Again Label", field4.name) + self.assertEqual(ContactField.TYPE_DATETIME, field4.value_type) # unchanged + + # can't create with an invalid key + for key in ContactField.RESERVED_KEYS: + with self.assertRaises(ValueError): + ContactField.get_or_create(self.org, self.admin, key, key, value_type=ContactField.TYPE_TEXT) + + # provided names are made unique + field5 = ContactField.get_or_create(self.org, self.admin, "date_joined", name="join date") + self.assertEqual("date_joined", field5.key) + self.assertEqual("join date 2", field5.name) + + # and ignored if not valid + field6 = ContactField.get_or_create(self.org, self.admin, "date_joined", name=" ") + self.assertEqual(field5, field6) + self.assertEqual("date_joined", field6.key) + self.assertEqual("join date 2", field6.name) # unchanged + + # same for creating a new field + field7 = ContactField.get_or_create(self.org, self.admin, "new_key", name=" ") + self.assertEqual("new_key", field7.key) + self.assertEqual("New Key", field7.name) # generated + + def test_make_key(self): + self.assertEqual("first_name", ContactField.make_key("First Name")) + self.assertEqual("second_name", ContactField.make_key("Second Name ")) + self.assertEqual("caf", ContactField.make_key("café")) + self.assertEqual( + "323_ffsn_slfs_ksflskfs_fk_anfaddgas", + ContactField.make_key(" ^%$# %$$ $##323 ffsn slfs ksflskfs!!!! 
fk$%%%$$$anfaDDGAS ))))))))) "), + ) + + def test_is_valid_key(self): + self.assertTrue(ContactField.is_valid_key("age")) + self.assertTrue(ContactField.is_valid_key("age_now_2")) + self.assertTrue(ContactField.is_valid_key("email")) + self.assertFalse(ContactField.is_valid_key("Age")) # must be lowercase + self.assertFalse(ContactField.is_valid_key("age!")) # can't have punctuation + self.assertFalse(ContactField.is_valid_key("âge")) # a-z only + self.assertFalse(ContactField.is_valid_key("2up")) # can't start with a number + self.assertFalse(ContactField.is_valid_key("has")) # can't be reserved key + self.assertFalse(ContactField.is_valid_key("is")) + self.assertFalse(ContactField.is_valid_key("fields")) + self.assertFalse(ContactField.is_valid_key("urns")) + self.assertFalse(ContactField.is_valid_key("a" * 37)) # too long + + def test_is_valid_name(self): + self.assertTrue(ContactField.is_valid_name("Age")) + self.assertTrue(ContactField.is_valid_name("Age Now 2")) + self.assertFalse(ContactField.is_valid_name("Age_Now")) # can't have punctuation + self.assertFalse(ContactField.is_valid_name("âge")) # a-z only + + @mock_mailroom + def test_contact_field_list_sort_fields(self, mr_mocks): + url = reverse("contacts.contact_list") + self.login(self.admin) + + mr_mocks.contact_search("", contacts=[self.joe]) + mr_mocks.contact_search("Joe", contacts=[self.joe]) + + response = self.client.get("%s?sort_on=%s" % (url, str(self.contactfield_1.key))) + + self.assertEqual(response.context["sort_field"], str(self.contactfield_1.key)) + self.assertEqual(response.context["sort_direction"], "asc") + self.assertNotIn("search", response.context) + + response = self.client.get("%s?sort_on=-%s" % (url, str(self.contactfield_1.key))) + + self.assertEqual(response.context["sort_field"], str(self.contactfield_1.key)) + self.assertEqual(response.context["sort_direction"], "desc") + self.assertNotIn("search", response.context) + + response = self.client.get("%s?sort_on=%s" % (url, "created_on")) + + self.assertEqual(response.context["sort_field"], "created_on") + self.assertEqual(response.context["sort_direction"], "asc") + self.assertNotIn("search", response.context) + + response = self.client.get("%s?sort_on=-%s&search=Joe" % (url, "created_on")) + + self.assertEqual(response.context["sort_field"], "created_on") + self.assertEqual(response.context["sort_direction"], "desc") + self.assertIn("search", response.context) + + def test_view_updatepriority_valid(self): + org_fields = ContactField.user_fields.filter(org=self.org, is_active=True) + + self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) + + self.login(self.admin) + updatepriority_cf_url = reverse("contacts.contactfield_update_priority") + + # there should be no updates because CFs with ids do not exist + post_data = json.dumps({123_123: 1000, 123_124: 999, 123_125: 998}) + + response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["status"], "OK") + + self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) + + # build valid post data + post_data = json.dumps({cf.key: index for index, cf in enumerate(org_fields.order_by("id"))}) + + # try to update as admin2 + self.login(self.admin2) + response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") + + # nothing changed + self.assertListEqual([10, 0, 20], [cf.priority for cf in 
org_fields.order_by("id")]) + + # then as real admin + self.login(self.admin) + response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["status"], "OK") + + self.assertListEqual([0, 1, 2], [cf.priority for cf in org_fields.order_by("id")]) + + def test_view_updatepriority_invalid(self): + org_fields = ContactField.user_fields.filter(org=self.org, is_active=True) + + self.assertListEqual([10, 0, 20], [cf.priority for cf in org_fields.order_by("id")]) + + self.login(self.admin) + updatepriority_cf_url = reverse("contacts.contactfield_update_priority") + + post_data = '{invalid_json": 123}' + + response = self.client.post(updatepriority_cf_url, post_data, content_type="application/json") + self.assertEqual(response.status_code, 400) + response_json = response.json() + self.assertEqual(response_json["status"], "ERROR") + self.assertEqual( + response_json["err_detail"], "Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" + ) diff --git a/temba/contacts/tests/test_fieldcrudl.py b/temba/contacts/tests/test_fieldcrudl.py new file mode 100644 index 00000000000..d864bd64dee --- /dev/null +++ b/temba/contacts/tests/test_fieldcrudl.py @@ -0,0 +1,374 @@ +from django.test.utils import override_settings +from django.urls import reverse + +from temba.campaigns.models import Campaign, CampaignEvent +from temba.contacts.models import ContactField +from temba.tests import CRUDLTestMixin, TembaTest, mock_mailroom + + +class ContactFieldCRUDLTest(TembaTest, CRUDLTestMixin): + def setUp(self): + super().setUp() + + self.age = self.create_field("age", "Age", value_type="N", show_in_table=True) + self.gender = self.create_field("gender", "Gender", value_type="T") + self.state = self.create_field("state", "State", value_type="S") + + self.deleted = self.create_field("foo", "Foo") + self.deleted.is_active = False + self.deleted.save(update_fields=("is_active",)) + + self.other_org_field = self.create_field("other", "Other", org=self.org2) + + def test_create(self): + create_url = reverse("contacts.contactfield_create") + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + + # for a deploy that doesn't have locations feature, don't show location field types + with override_settings(FEATURES={}): + response = self.assertCreateFetch( + create_url, + [self.editor, self.admin], + form_fields=["name", "value_type", "show_in_table", "agent_access"], + ) + self.assertEqual( + [("T", "Text"), ("N", "Number"), ("D", "Date & Time")], + response.context["form"].fields["value_type"].choices, + ) + + response = self.assertCreateFetch( + create_url, + [self.editor, self.admin], + form_fields=["name", "value_type", "show_in_table", "agent_access"], + ) + self.assertEqual( + [("T", "Text"), ("N", "Number"), ("D", "Date & Time"), ("S", "State"), ("I", "District"), ("W", "Ward")], + response.context["form"].fields["value_type"].choices, + ) + + # try to submit with empty name + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "", "value_type": "T", "show_in_table": True, "agent_access": "E"}, + form_errors={"name": "This field is required."}, + ) + + # try to submit with invalid name + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "???", "value_type": "T", "show_in_table": True, "agent_access": "E"}, + form_errors={"name": "Can only contain letters, numbers and hypens."}, + ) + + # try to submit with something that would be an invalid 
key + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "HAS", "value_type": "T", "show_in_table": True, "agent_access": "E"}, + form_errors={"name": "Can't be a reserved word."}, + ) + + # try to submit with name of existing field + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "AGE", "value_type": "N", "show_in_table": True, "agent_access": "E"}, + form_errors={"name": "Must be unique."}, + ) + + # submit with valid data + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Goats", "value_type": "N", "show_in_table": True, "agent_access": "E"}, + new_obj_query=ContactField.user_fields.filter( + org=self.org, name="Goats", value_type="N", show_in_table=True, agent_access="E" + ), + success_status=200, + ) + + # it's also ok to create a field with the same name as a deleted field + ContactField.user_fields.get(key="age").release(self.admin) + + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "N"}, + new_obj_query=ContactField.user_fields.filter( + org=self.org, name="Age", value_type="N", show_in_table=True, agent_access="N", is_active=True + ), + success_status=200, + ) + + # simulate an org which has reached the limit for fields + with override_settings(ORG_LIMIT_DEFAULTS={"fields": 2}): + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Sheep", "value_type": "T", "show_in_table": True, "agent_access": "E"}, + form_errors={ + "__all__": "This workspace has reached its limit of 2 fields. You must delete existing ones before you can create new ones." + }, + ) + + def test_update(self): + update_url = reverse("contacts.contactfield_update", args=[self.age.key]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + + # for a deploy that doesn't have locations feature, don't show location field types + with override_settings(FEATURES={}): + response = self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + ) + self.assertEqual(3, len(response.context["form"].fields["value_type"].choices)) + + response = self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + ) + self.assertEqual(6, len(response.context["form"].fields["value_type"].choices)) + + # try submit without change + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Age", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + success_status=200, + ) + + # try to submit with empty name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + form_errors={"name": "This field is required."}, + object_unchanged=self.age, + ) + + # try to submit with invalid name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "???", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + form_errors={"name": "Can only contain letters, numbers and hypens."}, + object_unchanged=self.age, + ) + + # try to submit with a name that is used by another field + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "GENDER", "value_type": "N", "show_in_table": True, "agent_access": "V"}, + form_errors={"name": "Must be unique."}, + object_unchanged=self.age, + ) + + # submit with different name, type and agent access + 
self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Age In Years", "value_type": "T", "show_in_table": False, "agent_access": "E"}, + success_status=200, + ) + + self.age.refresh_from_db() + self.assertEqual("Age In Years", self.age.name) + self.assertEqual("T", self.age.value_type) + self.assertFalse(self.age.show_in_table) + self.assertEqual("E", self.age.agent_access) + + # simulate an org which has reached the limit for fields - should still be able to update a field + with override_settings(ORG_LIMIT_DEFAULTS={"fields": 2}): + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Age 2", "value_type": "T", "show_in_table": True, "agent_access": "E"}, + success_status=200, + ) + + self.age.refresh_from_db() + self.assertEqual("Age 2", self.age.name) + + # create a date field used in a campaign event + registered = self.create_field("registered", "Registered", value_type="D") + campaign = Campaign.create(self.org, self.admin, "Reminders", self.create_group("Farmers")) + CampaignEvent.create_flow_event( + self.org, self.admin, campaign, registered, offset=1, unit="W", flow=self.create_flow("Test") + ) + + update_url = reverse("contacts.contactfield_update", args=[registered.key]) + + self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={"name": "Registered", "value_type": "D", "show_in_table": False, "agent_access": "V"}, + ) + + # try to submit with different type + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Registered", "value_type": "T", "show_in_table": False, "agent_access": "V"}, + form_errors={"value_type": "Can't change type of date field being used by campaign events."}, + object_unchanged=registered, + ) + + # submit with only a different name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Registered On", "value_type": "D", "show_in_table": False, "agent_access": "V"}, + success_status=200, + ) + + registered.refresh_from_db() + self.assertEqual("Registered On", registered.name) + self.assertEqual("D", registered.value_type) + self.assertFalse(registered.show_in_table) + + def test_list(self): + list_url = reverse("contacts.contactfield_list") + + self.assertRequestDisallowed(list_url, [None, self.agent]) + self.assertListFetch( + list_url, [self.user, self.editor, self.admin], context_objects=[self.age, self.gender, self.state] + ) + self.assertContentMenu(list_url, self.user, []) + self.assertContentMenu(list_url, self.admin, ["New"]) + + def test_create_warnings(self): + self.login(self.admin) + create_url = reverse("contacts.contactfield_create") + response = self.client.get(create_url) + + self.assertEqual(3, response.context["total_count"]) + self.assertEqual(250, response.context["total_limit"]) + self.assertNotContains(response, "You have reached the limit") + self.assertNotContains(response, "You are approaching the limit") + + with override_settings(ORG_LIMIT_DEFAULTS={"fields": 10}): + response = self.requestView(create_url, self.admin) + + self.assertContains(response, "You are approaching the limit") + + with override_settings(ORG_LIMIT_DEFAULTS={"fields": 3}): + response = self.requestView(create_url, self.admin) + + self.assertContains(response, "You have reached the limit") + + @mock_mailroom + def test_usages(self, mr_mocks): + flow = self.get_flow("dependencies", name="Dependencies") + field = ContactField.user_fields.filter(is_active=True, org=self.org, key="favorite_cat").get() + field.value_type = ContactField.TYPE_DATETIME + 
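+ # campaign events can only be anchored to date fields, hence the type change before creating the events below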
field.save(update_fields=("value_type",)) + + group = self.create_group("Farmers", query='favorite_cat != ""') + campaign = Campaign.create(self.org, self.admin, "Planting Reminders", group) + + # create flow events + event1 = CampaignEvent.create_flow_event( + self.org, + self.admin, + campaign, + relative_to=field, + offset=0, + unit="D", + flow=flow, + delivery_hour=17, + ) + inactive_campaignevent = CampaignEvent.create_flow_event( + self.org, + self.admin, + campaign, + relative_to=field, + offset=0, + unit="D", + flow=flow, + delivery_hour=20, + ) + inactive_campaignevent.is_active = False + inactive_campaignevent.save(update_fields=("is_active",)) + + usages_url = reverse("contacts.contactfield_usages", args=[field.key]) + + self.assertRequestDisallowed(usages_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(usages_url, [self.user, self.editor, self.admin], context_object=field) + + self.assertEqual( + {"flow": [flow], "group": [group], "campaign_event": [event1]}, + {t: list(qs) for t, qs in response.context["dependents"].items()}, + ) + + def test_delete(self): + # create new field 'Joined On' which is used by a campaign event (soft) and a flow (soft) + group = self.create_group("Amazing Group", contacts=[]) + joined_on = self.create_field("joined_on", "Joined On", value_type=ContactField.TYPE_DATETIME) + campaign = Campaign.create(self.org, self.admin, Campaign.get_unique_name(self.org, "Reminders"), group) + flow = self.create_flow("Amazing Flow") + flow.field_dependencies.add(joined_on) + campaign_event = CampaignEvent.create_flow_event( + self.org, self.admin, campaign, joined_on, offset=1, unit="W", flow=flow, delivery_hour=13 + ) + + # make 'Age' appear to be used by a flow (soft) and a group (hard) + flow.field_dependencies.add(self.age) + group.query_fields.add(self.age) + + delete_gender_url = reverse("contacts.contactfield_delete", args=[self.gender.key]) + delete_joined_url = reverse("contacts.contactfield_delete", args=[joined_on.key]) + delete_age_url = reverse("contacts.contactfield_delete", args=[self.age.key]) + + self.assertRequestDisallowed(delete_gender_url, [None, self.user, self.agent, self.admin2]) + + # a field with no dependents can be deleted + response = self.assertDeleteFetch(delete_gender_url, [self.editor, self.admin]) + self.assertEqual({}, response.context["soft_dependents"]) + self.assertEqual({}, response.context["hard_dependents"]) + self.assertContains(response, "You are about to delete") + self.assertContains(response, "There is no way to undo this. 
Are you sure?") + + self.assertDeleteSubmit(delete_gender_url, self.admin, object_deactivated=self.gender, success_status=200) + + # create the same field again + self.gender = self.create_field("gender", "Gender", value_type="T") + + # since fields are queried by key name, try and delete it again + # to make sure we aren't deleting the previous deleted field again + self.assertDeleteSubmit(delete_gender_url, self.admin, object_deactivated=self.gender, success_status=200) + self.gender.refresh_from_db() + self.assertFalse(self.gender.is_active) + + # a field with only soft dependents can also be deleted but we give warnings + response = self.assertDeleteFetch(delete_joined_url, [self.admin]) + self.assertEqual({"flow", "campaign_event"}, set(response.context["soft_dependents"].keys())) + self.assertEqual({}, response.context["hard_dependents"]) + self.assertContains(response, "is used by the following items but can still be deleted:") + self.assertContains(response, "Amazing Flow") + self.assertContains(response, "There is no way to undo this. Are you sure?") + + self.assertDeleteSubmit(delete_joined_url, self.admin, object_deactivated=joined_on, success_status=200) + + # check that flow is now marked as having issues + flow.refresh_from_db() + self.assertTrue(flow.has_issues) + self.assertNotIn(joined_on, flow.field_dependencies.all()) + + # and that the campaign event is gone + campaign_event.refresh_from_db() + self.assertFalse(campaign_event.is_active) + + # a field with hard dependents can't be deleted + response = self.assertDeleteFetch(delete_age_url, [self.admin]) + self.assertEqual({"flow"}, set(response.context["soft_dependents"].keys())) + self.assertEqual({"group"}, set(response.context["hard_dependents"].keys())) + self.assertContains(response, "can't be deleted as it is still used by the following items:") + self.assertContains(response, "Amazing Group") + self.assertNotContains(response, "Delete") diff --git a/temba/contacts/tests/test_group.py b/temba/contacts/tests/test_group.py new file mode 100644 index 00000000000..6db52955e17 --- /dev/null +++ b/temba/contacts/tests/test_group.py @@ -0,0 +1,289 @@ +from datetime import timedelta + +from django.urls import reverse +from django.utils import timezone + +from temba import mailroom +from temba.campaigns.models import Campaign, CampaignEvent, EventFire +from temba.contacts.models import Contact, ContactField, ContactGroup, ContactGroupCount +from temba.contacts.tasks import squash_group_counts +from temba.schedules.models import Schedule +from temba.tests import TembaTest, mock_mailroom + + +class ContactGroupTest(TembaTest): + def setUp(self): + super().setUp() + + self.joe = self.create_contact("Joe Blow", phone="123", fields={"age": "17", "gender": "male"}) + self.frank = self.create_contact("Frank Smith", phone="1234") + self.mary = self.create_contact("Mary Mo", phone="345", fields={"age": "21", "gender": "female"}) + + def test_create_manual(self): + group = ContactGroup.create_manual(self.org, self.admin, "group one") + + self.assertEqual(group.org, self.org) + self.assertEqual(group.name, "group one") + self.assertEqual(group.created_by, self.admin) + self.assertEqual(group.status, ContactGroup.STATUS_READY) + + # can't call update_query on a manual group + self.assertRaises(AssertionError, group.update_query, "gender=M") + + # assert failure if group name is blank + self.assertRaises(AssertionError, ContactGroup.create_manual, self.org, self.admin, " ") + + @mock_mailroom + def test_create_smart(self, mr_mocks): + 
age = self.org.fields.get(key="age") + gender = self.org.fields.get(key="gender") + + # create a dynamic group using a query + query = '(Age < 18 and gender = "male") or (Age > 18 and gender = "female")' + + group = ContactGroup.create_smart(self.org, self.admin, "Group two", query) + group.refresh_from_db() + + self.assertEqual(query, group.query) + self.assertEqual({age, gender}, set(group.query_fields.all())) + self.assertEqual(ContactGroup.STATUS_INITIALIZING, group.status) + + # update group query + mr_mocks.contact_parse_query("age > 18 and name ~ Mary", cleaned='age > 18 AND name ~ "Mary"') + group.update_query("age > 18 and name ~ Mary") + group.refresh_from_db() + + self.assertEqual(group.query, 'age > 18 AND name ~ "Mary"') + self.assertEqual(set(group.query_fields.all()), {age}) + self.assertEqual(group.status, ContactGroup.STATUS_INITIALIZING) + + # try to update group query to something invalid + mr_mocks.exception(mailroom.QueryValidationException("no valid", "syntax")) + with self.assertRaises(ValueError): + group.update_query("age ~ Mary") + + # can't create a dynamic group with empty query + self.assertRaises(AssertionError, ContactGroup.create_smart, self.org, self.admin, "Empty", "") + + # can't create a dynamic group with id attribute + self.assertRaises(ValueError, ContactGroup.create_smart, self.org, self.admin, "Bose", "id = 123") + + # dynamic group should not have remove to group button + self.login(self.admin) + filter_url = reverse("contacts.contact_group", args=[group.uuid]) + self.client.get(filter_url) + + # put group back into evaluation state + group.status = ContactGroup.STATUS_EVALUATING + group.save(update_fields=("status",)) + + # dynamic groups should get their own icon + self.assertEqual(group.get_attrs(), {"icon": "group_smart"}) + + # can't update query again while it is in this state + with self.assertRaises(AssertionError): + group.update_query("age = 18") + + def test_get_or_create(self): + group = ContactGroup.get_or_create(self.org, self.user, "first") + self.assertEqual(group.name, "first") + self.assertFalse(group.is_smart) + + # name look up is case insensitive + self.assertEqual(ContactGroup.get_or_create(self.org, self.user, "FIRST"), group) + + # fetching by id shouldn't modify original group + self.assertEqual(ContactGroup.get_or_create(self.org, self.user, "Kigali", uuid=group.uuid), group) + + group.refresh_from_db() + self.assertEqual(group.name, "first") + + @mock_mailroom + def test_get_groups(self, mr_mocks): + manual = ContactGroup.create_manual(self.org, self.admin, "Static") + deleted = ContactGroup.create_manual(self.org, self.admin, "Deleted") + deleted.is_active = False + deleted.save() + + open_tickets = self.org.groups.get(name="Open Tickets") + females = ContactGroup.create_smart(self.org, self.admin, "Females", "gender=F") + males = ContactGroup.create_smart(self.org, self.admin, "Males", "gender=M") + ContactGroup.objects.filter(id=males.id).update(status=ContactGroup.STATUS_READY) + + self.assertEqual(set(ContactGroup.get_groups(self.org)), {open_tickets, manual, females, males}) + self.assertEqual(set(ContactGroup.get_groups(self.org, manual_only=True)), {manual}) + self.assertEqual(set(ContactGroup.get_groups(self.org, ready_only=True)), {open_tickets, manual, males}) + + def test_get_unique_name(self): + self.assertEqual("Testers", ContactGroup.get_unique_name(self.org, "Testers")) + + # ensure checking against existing groups is case-insensitive + self.create_group("TESTERS", contacts=[]) + + 
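+ # name collisions are checked case-insensitively, so a numeric suffix gets appended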
self.assertEqual("Testers 2", ContactGroup.get_unique_name(self.org, "Testers")) + self.assertEqual("Testers", ContactGroup.get_unique_name(self.org2, "Testers")) # different org + + self.create_group("Testers 2", contacts=[]) + + self.assertEqual("Testers 3", ContactGroup.get_unique_name(self.org, "Testers")) + + # ensure we don't exceed the name length limit + self.create_group("X" * 64, contacts=[]) + + self.assertEqual(f"{'X' * 62} 2", ContactGroup.get_unique_name(self.org, "X" * 64)) + + @mock_mailroom + def test_member_count(self, mr_mocks): + group = self.create_group("Cool kids") + group.contacts.add(self.joe, self.frank) + + self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 2) + + group.contacts.add(self.mary) + + self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 3) + + group.contacts.remove(self.mary) + + self.assertEqual(ContactGroup.objects.get(pk=group.pk).get_member_count(), 2) + + # blocking a contact removes them from all user groups + self.joe.block(self.user) + + group = ContactGroup.objects.get(pk=group.pk) + self.assertEqual(group.get_member_count(), 1) + self.assertEqual(set(group.contacts.all()), {self.frank}) + + # releasing removes from all user groups + self.frank.release(self.user) + + group = ContactGroup.objects.get(pk=group.pk) + self.assertEqual(group.get_member_count(), 0) + self.assertEqual(set(group.contacts.all()), set()) + + @mock_mailroom + def test_status_group_counts(self, mr_mocks): + # start with no contacts + for contact in Contact.objects.all(): + contact.release(self.admin) + contact.delete() + + counts = Contact.get_status_counts(self.org) + self.assertEqual( + counts, + { + Contact.STATUS_ACTIVE: 0, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + self.create_contact("Hannibal", phone="0783835001") + face = self.create_contact("Face", phone="0783835002") + ba = self.create_contact("B.A.", phone="0783835003") + murdock = self.create_contact("Murdock", phone="0783835004") + + counts = Contact.get_status_counts(self.org) + self.assertEqual( + counts, + { + Contact.STATUS_ACTIVE: 4, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + # call methods twice to check counts don't change twice + murdock.block(self.user) + murdock.block(self.user) + face.block(self.user) + ba.stop(self.user) + ba.stop(self.user) + + counts = Contact.get_status_counts(self.org) + self.assertEqual( + counts, + { + Contact.STATUS_ACTIVE: 1, + Contact.STATUS_BLOCKED: 2, + Contact.STATUS_STOPPED: 1, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + # squash all our counts, this shouldn't affect our overall counts, but we should now only have 3 + squash_group_counts() + self.assertEqual(ContactGroupCount.objects.all().count(), 3) + + murdock.release(self.user) + murdock.release(self.user) + face.restore(self.user) + face.restore(self.user) + ba.restore(self.user) + ba.restore(self.user) + + # squash again, this time we discard zero counts + squash_group_counts() + self.assertEqual(ContactGroupCount.objects.all().count(), 1) + + counts = Contact.get_status_counts(self.org) + self.assertEqual( + counts, + { + Contact.STATUS_ACTIVE: 3, + Contact.STATUS_BLOCKED: 0, + Contact.STATUS_STOPPED: 0, + Contact.STATUS_ARCHIVED: 0, + }, + ) + + @mock_mailroom + def test_release(self, mr_mocks): + contact1 = self.create_contact("Bob", phone="+1234567111") + contact2 = self.create_contact("Jim", phone="+1234567222") + contact3 = 
self.create_contact("Jim", phone="+1234567333") + group1 = self.create_group("Group One", contacts=[contact1, contact2]) + group2 = self.create_group("Group One", contacts=[contact2, contact3]) + + t1 = timezone.now() + + # create a campaign based on group 1 - a hard dependency + campaign = Campaign.create(self.org, self.admin, "Reminders", group1) + joined = self.create_field("joined", "Joined On", value_type=ContactField.TYPE_DATETIME) + event = CampaignEvent.create_message_event(self.org, self.admin, campaign, joined, 2, unit="D", message="Hi") + EventFire.objects.create(event=event, contact=self.joe, scheduled=timezone.now() + timedelta(days=2)) + campaign.is_archived = True + campaign.save() + + # create scheduled and regular broadcasts which send to both groups + schedule = Schedule.create(self.org, timezone.now(), Schedule.REPEAT_DAILY) + bcast1 = self.create_broadcast(self.admin, {"eng": {"text": "Hi"}}, groups=[group1, group2], schedule=schedule) + bcast2 = self.create_broadcast(self.admin, {"eng": {"text": "Hi"}}, groups=[group1, group2]) + + # group still has a hard dependency so can't be released + with self.assertRaises(AssertionError): + group1.release(self.admin) + + campaign.delete() + + group1.release(self.admin) + group1.refresh_from_db() + + self.assertFalse(group1.is_active) + self.assertTrue(group1.name.startswith("deleted-")) + self.assertEqual(0, EventFire.objects.count()) # event fires will have been deleted + self.assertEqual({group2}, set(bcast1.groups.all())) # removed from scheduled broadcast + self.assertEqual({group1, group2}, set(bcast2.groups.all())) # regular broadcast unchanged + + self.assertEqual(set(), set(group1.contacts.all())) + self.assertEqual({contact2, contact3}, set(group2.contacts.all())) # unchanged + + # check that contacts who were in the group have had their modified_on times updated + contact1.refresh_from_db() + contact2.refresh_from_db() + contact3.refresh_from_db() + self.assertGreater(contact1.modified_on, t1) + self.assertGreater(contact2.modified_on, t1) + self.assertLess(contact3.modified_on, t1) # unchanged diff --git a/temba/contacts/tests/test_groupcrudl.py b/temba/contacts/tests/test_groupcrudl.py new file mode 100644 index 00000000000..3f208750618 --- /dev/null +++ b/temba/contacts/tests/test_groupcrudl.py @@ -0,0 +1,362 @@ +from datetime import timedelta + +from django.test.utils import override_settings +from django.urls import reverse +from django.utils import timezone + +from temba import mailroom +from temba.campaigns.models import Campaign +from temba.contacts.models import ContactGroup +from temba.schedules.models import Schedule +from temba.tests import CRUDLTestMixin, TembaTest, mock_mailroom +from temba.triggers.models import Trigger + + +class ContactGroupCRUDLTest(TembaTest, CRUDLTestMixin): + def setUp(self): + super().setUp() + + self.joe = self.create_contact("Joe Blow", phone="123") + self.frank = self.create_contact("Frank Smith", urns=["tel:1234", "twitter:hola"]) + + self.joe_and_frank = self.create_group("Customers", [self.joe, self.frank]) + + self.other_org_group = self.create_group("Customers", contacts=[], org=self.org2) + + @override_settings(ORG_LIMIT_DEFAULTS={"groups": 10}) + @mock_mailroom + def test_create(self, mr_mocks): + url = reverse("contacts.contactgroup_create") + + # can't create group as viewer + self.login(self.user) + response = self.client.post(url, {"name": "Spammers"}) + self.assertLoginRedirect(response) + + self.login(self.admin) + + # try to create a contact group whose name is 
only whitespace + response = self.client.post(url, {"name": " "}) + self.assertFormError(response.context["form"], "name", "This field is required.") + + # try to create a contact group whose name contains a disallowed character + response = self.client.post(url, {"name": '"People"'}) + self.assertFormError(response.context["form"], "name", 'Cannot contain the character: "') + + # try to create a contact group whose name is too long + response = self.client.post(url, {"name": "X" * 65}) + self.assertFormError( + response.context["form"], "name", "Ensure this value has at most 64 characters (it has 65)." + ) + + # try to create with name that's already taken + response = self.client.post(url, {"name": "Customers"}) + self.assertFormError(response.context["form"], "name", "Already used by another group.") + + # create with valid name (that will be trimmed) + response = self.client.post(url, {"name": "first "}) + self.assertNoFormErrors(response) + ContactGroup.objects.get(org=self.org, name="first") + + # create a group with preselected contacts + self.client.post(url, {"name": "Everybody", "preselected_contacts": f"{self.joe.id},{self.frank.id}"}) + group = ContactGroup.objects.get(org=self.org, name="Everybody") + self.assertEqual(set(group.contacts.all()), {self.joe, self.frank}) + + # create a dynamic group using a query + self.client.post(url, {"name": "Frank", "group_query": "tel = 1234"}) + + ContactGroup.objects.get(org=self.org, name="Frank", query="tel = 1234") + + for group in ContactGroup.objects.filter(is_system=False): + group.release(self.admin) + + for i in range(10): + ContactGroup.create_manual(self.org2, self.admin2, "group%d" % i) + + response = self.client.post(url, {"name": "People"}) + self.assertNoFormErrors(response) + ContactGroup.objects.get(org=self.org, name="People") + + for group in ContactGroup.objects.filter(is_system=False): + group.release(self.admin) + + for i in range(10): + ContactGroup.create_manual(self.org, self.admin, "group%d" % i) + + self.assertEqual(10, ContactGroup.objects.filter(is_active=True, is_system=False).count()) + response = self.client.post(url, {"name": "People"}) + self.assertFormError( + response.context["form"], + "name", + "This workspace has reached its limit of 10 groups. 
You must delete existing ones before you can create new ones.",
+        )
+
+    def test_create_disallow_duplicates(self):
+        self.login(self.admin)
+
+        self.client.post(reverse("contacts.contactgroup_create"), dict(name="First Group"))
+
+        # assert it was created
+        ContactGroup.objects.get(name="First Group")
+
+        # try to create another group with the same name, but a dynamic query, should fail
+        response = self.client.post(
+            reverse("contacts.contactgroup_create"), dict(name="First Group", group_query="firsts")
+        )
+        self.assertFormError(response.context["form"], "name", "Already used by another group.")
+
+        # try to create another group with the same name, not dynamic, same thing
+        response = self.client.post(reverse("contacts.contactgroup_create"), dict(name="First Group"))
+        self.assertFormError(response.context["form"], "name", "Already used by another group.")
+
+    @mock_mailroom
+    def test_update(self, mr_mocks):
+        manual = self.create_group("Customers", [self.joe, self.frank])
+        smart = self.create_group("Dynamic", query="tel is 1234")
+        open_tickets = self.org.groups.get(name="Open Tickets")
+
+        update_url = reverse("contacts.contactgroup_update", args=[manual.id])
+
+        self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2])
+
+        self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=("name",))
+
+        # try to update name to only whitespace
+        self.assertUpdateSubmit(
+            update_url,
+            self.admin,
+            {"name": " "},
+            form_errors={"name": "This field is required."},
+            object_unchanged=manual,
+        )
+
+        # try to update name to contain a disallowed character
+        self.assertUpdateSubmit(
+            update_url,
+            self.admin,
+            {"name": '"People"'},
+            form_errors={"name": 'Cannot contain the character: "'},
+            object_unchanged=manual,
+        )
+
+        # update with valid name (that will be trimmed)
+        self.assertUpdateSubmit(update_url, self.admin, {"name": "new name "})
+
+        manual.refresh_from_db()
+        self.assertEqual(manual.name, "new name")
+
+        # now try a smart group
+        update_url = reverse("contacts.contactgroup_update", args=[smart.id])
+
+        # mark our group as ready
+        smart.status = ContactGroup.STATUS_READY
+        smart.save(update_fields=("status",))
+
+        self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=("name", "query"))
+
+        # simulate submitting an unparseable query
+        mr_mocks.exception(mailroom.QueryValidationException("error at !", "syntax"))
+
+        self.assertUpdateSubmit(
+            update_url,
+            self.admin,
+            {"name": "Frank", "query": "(!))!)"},
+            form_errors={"query": "Invalid query syntax."},
+            object_unchanged=smart,
+        )
+
+        # or a query that depends on id
+        self.assertUpdateSubmit(
+            update_url,
+            self.admin,
+            {"name": "Frank", "query": "id = 123"},
+            form_errors={"query": 'You cannot create a smart group based on "id" or "group".'},
+            object_unchanged=smart,
+        )
+
+        # update with valid query
+        self.assertUpdateSubmit(update_url, self.admin, {"name": "Frank", "query": 'twitter = "hola"'})
+
+        smart.refresh_from_db()
+        self.assertEqual(smart.query, 'twitter = "hola"')
+
+        # mark our dynamic group as evaluating
+        smart.status = ContactGroup.STATUS_EVALUATING
+        smart.save(update_fields=("status",))
+
+        # and check we can't change the query while that is the case
+        self.assertUpdateSubmit(
+            update_url,
+            self.admin,
+            {"name": "Frank", "query": 'twitter = "hello"'},
+            form_errors={"query": "You cannot update the query of a group that is populating."},
+            object_unchanged=smart,
+        )
+
+        # but can change the name
self.assertUpdateSubmit(update_url, self.admin, {"name": "Frank2", "query": 'twitter = "hola"'}) + + smart.refresh_from_db() + self.assertEqual(smart.name, "Frank2") + + # try to update a system group + response = self.requestView(reverse("contacts.contactgroup_update", args=[open_tickets.id]), self.admin) + self.assertEqual(404, response.status_code) + + def test_usages(self): + flow = self.get_flow("dependencies", name="Dependencies") + group = ContactGroup.objects.get(name="Cat Facts") + + campaign1 = Campaign.create(self.org, self.admin, "Planting Reminders", group) + campaign2 = Campaign.create(self.org, self.admin, "Deleted", group) + campaign2.is_active = False + campaign2.save(update_fields=("is_active",)) + + trigger1 = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + flow, + keywords=["test1"], + match_type=Trigger.MATCH_FIRST_WORD, + groups=[group], + ) + trigger2 = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + flow, + keywords=["test2"], + match_type=Trigger.MATCH_FIRST_WORD, + exclude_groups=[group], + ) + + usages_url = reverse("contacts.contactgroup_usages", args=[group.uuid]) + + self.assertRequestDisallowed(usages_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(usages_url, [self.user, self.editor, self.admin], context_object=group) + + self.assertEqual( + {"flow": [flow], "campaign": [campaign1], "trigger": [trigger1, trigger2]}, + {t: list(qs) for t, qs in response.context["dependents"].items()}, + ) + + def test_delete(self): + # create a group which isn't used by anything + group1 = self.create_group("Group 1", contacts=[]) + + # create a group which is used only by a flow (soft dependency) + group2 = self.create_group("Group 2", contacts=[]) + flow1 = self.create_flow("Flow 1") + flow1.group_dependencies.add(group2) + + # create a group which is used by a flow (soft) and a scheduled trigger (soft) + group3 = self.create_group("Group 3", contacts=[]) + flow2 = self.create_flow("Flow 2") + flow2.group_dependencies.add(group3) + schedule1 = Schedule.create(self.org, timezone.now() + timedelta(days=3), Schedule.REPEAT_DAILY) + trigger1 = Trigger.create( + self.org, + self.admin, + trigger_type=Trigger.TYPE_SCHEDULE, + flow=flow2, + keywords=["trigger1"], + match_type=Trigger.MATCH_FIRST_WORD, + groups=[group3.id], + schedule=schedule1, + ) + self.assertEqual(1, group3.triggers.count()) + self.assertEqual(trigger1, group3.triggers.get(is_active=True, keywords=trigger1.keywords)) + + # create a group which is used by a flow (soft), a trigger (soft), and a campaign (hard dependency) + group4 = self.create_group("Group 4", contacts=[]) + flow3 = self.create_flow("Flow 3") + flow3.group_dependencies.add(group4) + trigger2 = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + flow3, + keywords=["trigger2"], + match_type=Trigger.MATCH_FIRST_WORD, + groups=[group4], + ) + campaign1 = Campaign.create(self.org, self.admin, "Planting Reminders", group4) + + delete_group1_url = reverse("contacts.contactgroup_delete", args=[group1.uuid]) + delete_group2_url = reverse("contacts.contactgroup_delete", args=[group2.uuid]) + delete_group3_url = reverse("contacts.contactgroup_delete", args=[group3.uuid]) + delete_group4_url = reverse("contacts.contactgroup_delete", args=[group4.uuid]) + + self.assertRequestDisallowed(delete_group1_url, [None, self.user, self.agent, self.admin2]) + + # a group with no dependents can be deleted + response = self.assertDeleteFetch(delete_group1_url, [self.editor, self.admin]) + + 
self.assertEqual({}, response.context["soft_dependents"])
+        self.assertEqual({}, response.context["hard_dependents"])
+        self.assertContains(response, "You are about to delete")
+        self.assertContains(response, "There is no way to undo this. Are you sure?")
+
+        self.assertDeleteSubmit(delete_group1_url, self.admin, object_deactivated=group1, success_status=200)
+
+        # a group with only soft dependents can be deleted but we give warnings
+        response = self.assertDeleteFetch(delete_group2_url, [self.editor])
+
+        self.assertEqual({"flow"}, set(response.context["soft_dependents"].keys()))
+        self.assertEqual({}, response.context["hard_dependents"])
+        self.assertContains(response, "is used by the following items but can still be deleted:")
+        self.assertContains(response, flow1.name)
+        self.assertContains(response, "There is no way to undo this. Are you sure?")
+
+        self.assertDeleteSubmit(delete_group2_url, self.admin, object_deactivated=group2, success_status=200)
+
+        # check that the flow is now marked as having issues
+        flow1.refresh_from_db()
+        self.assertTrue(flow1.has_issues)
+        self.assertNotIn(group2, flow1.group_dependencies.all())
+
+        # a group with only soft dependents can be deleted but we give warnings
+        response = self.assertDeleteFetch(delete_group3_url, [self.admin])
+
+        self.assertEqual({"flow", "trigger"}, set(response.context["soft_dependents"].keys()))
+        self.assertEqual({}, response.context["hard_dependents"])
+        self.assertContains(response, "is used by the following items but can still be deleted:")
+        self.assertContains(response, flow2.name)
+        self.assertContains(response, f"Schedule → {flow2.name}")
+        self.assertContains(response, "There is no way to undo this. Are you sure?")
+
+        self.assertDeleteSubmit(delete_group3_url, self.admin, object_deactivated=group3, success_status=200)
+
+        # check that the flow is now marked as having issues
+        flow2.refresh_from_db()
+        self.assertTrue(flow2.has_issues)
+        self.assertNotIn(group3, flow2.group_dependencies.all())
+
+        # check that the trigger is released
+        trigger1.refresh_from_db()
+        self.assertFalse(trigger1.is_active)
+
+        # a group with hard dependents can't be deleted
+        response = self.assertDeleteFetch(delete_group4_url, [self.admin])
+
+        self.assertEqual({"flow", "trigger"}, set(response.context["soft_dependents"].keys()))
+        self.assertEqual({"campaign"}, set(response.context["hard_dependents"].keys()))
+        self.assertContains(response, "can't be deleted as it is still used by the following items:")
+        self.assertContains(response, campaign1.name)
+        self.assertNotContains(response, "Delete")
+
+        # check that the flow is not deleted
+        flow3.refresh_from_db()
+        self.assertTrue(flow3.is_active)
+
+        # check that the trigger is not released
+        trigger2.refresh_from_db()
+        self.assertTrue(trigger2.is_active)
+
+        # check that the campaign is not deleted
+        campaign1.refresh_from_db()
+        self.assertTrue(campaign1.is_active)
diff --git a/temba/contacts/tests/test_import.py b/temba/contacts/tests/test_import.py
new file mode 100644
index 00000000000..0df5c7a3a3c
--- /dev/null
+++ b/temba/contacts/tests/test_import.py
@@ -0,0 +1,615 @@
+from datetime import date, datetime
+from unittest.mock import patch
+from zoneinfo import ZoneInfo
+
+from django.core.validators import ValidationError
+from django.utils import timezone
+
+from temba.contacts.models import ContactField, ContactImport, ContactImportBatch
+from temba.tests import TembaTest, matchers, mock_mailroom
+
+
+class ContactImportTest(TembaTest):
+    def test_parse_errors(self):
+        # try to 
open an import that is completely empty + with self.assertRaisesRegex(ValidationError, "Import file appears to be empty."): + path = "media/test_imports/empty_all_rows.xlsx" # No header row present either + with open(path, "rb") as f: + ContactImport.try_to_parse(self.org, f, path) + + def try_to_parse(name): + path = f"media/test_imports/{name}" + with open(path, "rb") as f: + ContactImport.try_to_parse(self.org, f, path) + + # try to open an import that exceeds the record limit + with patch("temba.contacts.models.ContactImport.MAX_RECORDS", 2): + with self.assertRaisesRegex(ValidationError, r"Import files can contain a maximum of 2 records\."): + try_to_parse("simple.xlsx") + + bad_files = [ + ("empty.xlsx", "Import file doesn't contain any records."), + ("empty_header.xlsx", "Import file contains an empty header."), + ("duplicate_urn.xlsx", "Import file contains duplicated contact URN 'tel:+250788382382' on row 4."), + ( + "duplicate_uuid.xlsx", + "Import file contains duplicated contact UUID 'f519ca1f-8513-49ba-8896-22bf0420dec7' on row 4.", + ), + ("invalid_scheme.xlsx", "Header 'URN:XXX' is not a valid URN type."), + ("invalid_field_key.xlsx", "Header 'Field: #$^%' is not a valid field name."), + ("reserved_field_key.xlsx", "Header 'Field:HAS' is not a valid field name."), + ("no_urn_or_uuid.xlsx", "Import files must contain either UUID or a URN header."), + ("uuid_only.xlsx", "Import files must contain columns besides UUID."), + ("invalid.txt.xlsx", "Import file appears to be corrupted."), + ] + + for imp_file, imp_error in bad_files: + with self.assertRaises(ValidationError, msg=f"expected error in {imp_file}") as e: + try_to_parse(imp_file) + self.assertEqual(imp_error, e.exception.messages[0], f"error mismatch for {imp_file}") + + def test_extract_mappings(self): + # try simple import in different formats + for ext in ("xlsx",): + imp = self.create_contact_import(f"media/test_imports/simple.{ext}") + self.assertEqual(3, imp.num_records) + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "name", "mapping": {"type": "attribute", "name": "name"}}, + ], + imp.mappings, + ) + + # try import with 2 URN types + imp = self.create_contact_import("media/test_imports/twitter_and_phone.xlsx") + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "name", "mapping": {"type": "attribute", "name": "name"}}, + {"header": "URN:Twitter", "mapping": {"type": "scheme", "scheme": "twitter"}}, + ], + imp.mappings, + ) + + # or with 3 URN columns + imp = self.create_contact_import("media/test_imports/multiple_tel_urns.xlsx") + self.assertEqual( + [ + {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + ], + imp.mappings, + ) + + imp = self.create_contact_import("media/test_imports/missing_name_header.xlsx") + self.assertEqual([{"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}], imp.mappings) + + self.create_field("goats", "Num Goats", ContactField.TYPE_NUMBER) + + imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, + {"header": "language", 
"mapping": {"type": "attribute", "name": "language"}}, + {"header": "Status", "mapping": {"type": "attribute", "name": "status"}}, + {"header": "Created On", "mapping": {"type": "ignore"}}, + { + "header": "field: goats", + "mapping": {"type": "field", "key": "goats", "name": "Num Goats"}, # matched by key + }, + { + "header": "Field:Sheep", + "mapping": {"type": "new_field", "key": "sheep", "name": "Sheep", "value_type": "T"}, + }, + {"header": "Group:Testers", "mapping": {"type": "ignore"}}, + ], + imp.mappings, + ) + + # it's possible for field keys and labels to be out of sync, in which case we match by label first because + # that's how we export contacts + self.create_field("num_goats", "Goats", ContactField.TYPE_NUMBER) + + imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") + self.assertEqual( + { + "header": "field: goats", + "mapping": {"type": "field", "key": "num_goats", "name": "Goats"}, # matched by label + }, + imp.mappings[5], + ) + + # a header can be a number but it will be ignored + imp = self.create_contact_import("media/test_imports/numerical_header.xlsx") + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "Name", "mapping": {"name": "name", "type": "attribute"}}, + {"header": "123", "mapping": {"type": "ignore"}}, + ], + imp.mappings, + ) + + self.create_field("a_number", "A-Number", ContactField.TYPE_NUMBER) + + imp = self.create_contact_import("media/test_imports/header_chars.xlsx") + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, + {"header": "Field: A-Number", "mapping": {"type": "field", "key": "a_number", "name": "A-Number"}}, + ], + imp.mappings, + ) + + @mock_mailroom + def test_batches(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + self.assertEqual(3, imp.num_records) + self.assertIsNone(imp.started_on) + + # info can be fetched but it's empty + self.assertEqual( + {"status": "P", "num_created": 0, "num_updated": 0, "num_errored": 0, "errors": [], "time_taken": 0}, + imp.get_info(), + ) + + imp.start() + batches = list(imp.batches.order_by("id")) + + self.assertIsNotNone(imp.started_on) + self.assertEqual(1, len(batches)) + self.assertEqual(0, batches[0].record_start) + self.assertEqual(3, batches[0].record_end) + self.assertEqual( + [ + { + "_import_row": 2, + "name": "Eric Newcomer", + "urns": ["tel:+250788382382"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "name": "NIC POTTIER", + "urns": ["tel:+250788383383"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 4, + "name": "jen newcomer", + "urns": ["tel:+250788383385"], + "groups": [str(imp.group.uuid)], + }, + ], + batches[0].specs, + ) + + # check batch was queued for import by mailroom + self.assertEqual( + [ + { + "type": "import_contact_batch", + "org_id": self.org.id, + "task": {"contact_import_batch_id": batches[0].id}, + "queued_on": matchers.Datetime(), + }, + ], + mr_mocks.queued_batch_tasks, + ) + + # records are batched if they exceed batch size + with patch("temba.contacts.models.ContactImport.BATCH_SIZE", 2): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + imp.start() + + batches = list(imp.batches.order_by("id")) + self.assertEqual(2, len(batches)) + self.assertEqual(0, batches[0].record_start) + self.assertEqual(2, batches[0].record_end) + self.assertEqual(2, batches[1].record_start) + 
self.assertEqual(3, batches[1].record_end)
+
+        # info is calculated across all batches
+        self.assertEqual(
+            {
+                "status": "O",
+                "num_created": 0,
+                "num_updated": 0,
+                "num_errored": 0,
+                "errors": [],
+                "time_taken": matchers.Int(min=0),
+            },
+            imp.get_info(),
+        )
+
+        # simulate mailroom starting to process first batch
+        imp.batches.filter(id=batches[0].id).update(
+            status="O", num_created=2, num_updated=1, errors=[{"record": 1, "message": "that's wrong"}]
+        )
+
+        self.assertEqual(
+            {
+                "status": "O",
+                "num_created": 2,
+                "num_updated": 1,
+                "num_errored": 0,
+                "errors": [{"record": 1, "message": "that's wrong"}],
+                "time_taken": matchers.Int(min=0),
+            },
+            imp.get_info(),
+        )
+
+        # simulate mailroom completing first batch, starting second
+        imp.batches.filter(id=batches[0].id).update(status="C", finished_on=timezone.now())
+        imp.batches.filter(id=batches[1].id).update(
+            status="O", num_created=3, num_updated=5, errors=[{"record": 3, "message": "that's not right"}]
+        )
+
+        self.assertEqual(
+            {
+                "status": "O",
+                "num_created": 5,
+                "num_updated": 6,
+                "num_errored": 0,
+                "errors": [{"record": 1, "message": "that's wrong"}, {"record": 3, "message": "that's not right"}],
+                "time_taken": matchers.Int(min=0),
+            },
+            imp.get_info(),
+        )
+
+        # simulate mailroom completing second batch
+        imp.batches.filter(id=batches[1].id).update(status="C", finished_on=timezone.now())
+        imp.status = "C"
+        imp.finished_on = timezone.now()
+        imp.save(update_fields=("finished_on", "status"))
+
+        self.assertEqual(
+            {
+                "status": "C",
+                "num_created": 5,
+                "num_updated": 6,
+                "num_errored": 0,
+                "errors": [{"record": 1, "message": "that's wrong"}, {"record": 3, "message": "that's not right"}],
+                "time_taken": matchers.Int(min=0),
+            },
+            imp.get_info(),
+        )
+
+    @mock_mailroom
+    def test_batches_with_fields(self, mr_mocks):
+        self.create_field("goats", "Goats", ContactField.TYPE_NUMBER)
+
+        imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx")
+        imp.start()
+        batch = imp.batches.get()  # single batch
+
+        self.assertEqual(
+            [
+                {
+                    "_import_row": 2,
+                    "name": "John Doe",
+                    "language": "eng",
+                    "status": "archived",
+                    "urns": ["tel:+250788123123"],
+                    "fields": {"goats": "1", "sheep": "0"},
+                    "groups": [str(imp.group.uuid)],
+                },
+                {
+                    "_import_row": 3,
+                    "name": "Mary Smith",
+                    "language": "spa",
+                    "status": "blocked",
+                    "urns": ["tel:+250788456456"],
+                    "fields": {"goats": "3", "sheep": "5"},
+                    "groups": [str(imp.group.uuid)],
+                },
+                {
+                    "_import_row": 4,
+                    "urns": ["tel:+250788456678"],
+                    "groups": [str(imp.group.uuid)],
+                },  # blank values ignored
+            ],
+            batch.specs,
+        )
+
+        imp = self.create_contact_import("media/test_imports/with_empty_rows.xlsx")
+        imp.start()
+        batch = imp.batches.get()  # single batch
+
+        # rows 3 and 4 are empty and so are skipped
+        self.assertEqual(
+            [
+                {
+                    "_import_row": 2,
+                    "name": "John Doe",
+                    "language": "eng",
+                    "urns": ["tel:+250788123123"],
+                    "fields": {"goats": "1", "sheep": "0"},
+                    "groups": [str(imp.group.uuid)],
+                },
+                {
+                    "_import_row": 5,
+                    "name": "Mary Smith",
+                    "language": "spa",
+                    "urns": ["tel:+250788456456"],
+                    "fields": {"goats": "3", "sheep": "5"},
+                    "groups": [str(imp.group.uuid)],
+                },
+                {
+                    "_import_row": 6,
+                    "urns": ["tel:+250788456678"],
+                    "groups": [str(imp.group.uuid)],
+                },  # blank values ignored
+            ],
+            batch.specs,
+        )
+
+        imp = self.create_contact_import("media/test_imports/with_uuid.xlsx")
+        imp.start()
+        batch = imp.batches.get()
+        self.assertEqual(
+            [
+                {
+                    "_import_row": 2,
+                    "uuid": "f519ca1f-8513-49ba-8896-22bf0420dec7",
+                    "name": 
"Joe", + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "uuid": "989975f0-3bff-43d6-82c8-a6bbc201c938", + "name": "Frank", + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + # cells with -- mean explicit clearing of those values + imp = self.create_contact_import("media/test_imports/explicit_clearing.xlsx") + imp.start() + batch = imp.batches.get() # single batch + + self.assertEqual( + { + "_import_row": 4, + "name": "", + "language": "", + "urns": ["tel:+250788456678"], + "fields": {"goats": "", "sheep": ""}, + "groups": [str(imp.group.uuid)], + }, + batch.specs[2], + ) + + # uuids and languages converted to lowercase, case in names is preserved + imp = self.create_contact_import("media/test_imports/uppercase.xlsx") + imp.start() + batch = imp.batches.get() + self.assertEqual( + [ + { + "_import_row": 2, + "uuid": "92faa753-6faa-474a-a833-788032d0b757", + "name": "Eric Newcomer", + "language": "eng", + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "uuid": "3c11ac1f-c869-4247-a73c-9b97bff61659", + "name": "NIC POTTIER", + "language": "spa", + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + @mock_mailroom + def test_batches_with_invalid_urn(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/invalid_urn.xlsx") + imp.start() + batch = imp.batches.get() + + # invalid looking urns still passed to mailroom to decide how to handle them + self.assertEqual( + [ + {"_import_row": 2, "name": "Eric Newcomer", "urns": ["tel:+%3F"], "groups": [str(imp.group.uuid)]}, + { + "_import_row": 3, + "name": "Nic Pottier", + "urns": ["tel:2345678901234567890"], + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + @mock_mailroom + def test_batches_with_multiple_tels(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/multiple_tel_urns.xlsx") + imp.start() + batch = imp.batches.get() + + self.assertEqual( + [ + { + "_import_row": 2, + "name": "Bob", + "urns": ["tel:+250788382001", "tel:+250788382002", "tel:+250788382003"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "name": "Jim", + "urns": ["tel:+250788382004", "tel:+250788382005"], + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + @mock_mailroom + def test_batches_from_xlsx(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + imp.start() + batch = imp.batches.get() + + self.assertEqual( + [ + { + "_import_row": 2, + "name": "Eric Newcomer", + "urns": ["tel:+250788382382"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "name": "NIC POTTIER", + "urns": ["tel:+250788383383"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 4, + "name": "jen newcomer", + "urns": ["tel:+250788383385"], + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + @mock_mailroom + def test_batches_from_xlsx_with_formulas(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/formula_data.xlsx") + imp.start() + batch = imp.batches.get() + + self.assertEqual( + [ + { + "_import_row": 2, + "fields": {"team": "Managers"}, + "name": "John Smith", + "urns": ["tel:+12025550199"], + "groups": [str(imp.group.uuid)], + }, + { + "_import_row": 3, + "fields": {"team": "Advisors"}, + "name": "Mary Green", + "urns": ["tel:+14045550178"], + "groups": [str(imp.group.uuid)], + }, + ], + batch.specs, + ) + + @mock_mailroom + def test_detect_spamminess(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/sequential_tels.xlsx") + 
imp.start() + + self.org.refresh_from_db() + self.assertTrue(self.org.is_flagged) + + with patch("temba.contacts.models.ContactImport.SEQUENTIAL_URNS_THRESHOLD", 3): + self.assertFalse(ContactImport._detect_spamminess(["tel:+593979000001", "tel:+593979000002"])) + self.assertFalse( + ContactImport._detect_spamminess( + ["tel:+593979000001", "tel:+593979000003", "tel:+593979000005", "tel:+593979000007"] + ) + ) + + self.assertTrue( + ContactImport._detect_spamminess(["tel:+593979000001", "tel:+593979000002", "tel:+593979000003"]) + ) + + # order not important + self.assertTrue( + ContactImport._detect_spamminess(["tel:+593979000003", "tel:+593979000001", "tel:+593979000002"]) + ) + + # non-numeric paths ignored + self.assertTrue( + ContactImport._detect_spamminess( + ["tel:+593979000001", "tel:ABC", "tel:+593979000002", "tel:+593979000003"] + ) + ) + + @mock_mailroom + def test_detect_spamminess_verified_org(self, mr_mocks): + # if an org is verified, no flagging occurs + self.org.verify() + + imp = self.create_contact_import("media/test_imports/sequential_tels.xlsx") + imp.start() + + self.org.refresh_from_db() + self.assertFalse(self.org.is_flagged) + + def test_data_types(self): + imp = self.create_contact_import("media/test_imports/data_formats.xlsx") + imp.start() + batch = imp.batches.get() + self.assertEqual( + [ + { + "_import_row": 2, + "uuid": "17c4388a-024f-4e67-937a-13be78a70766", + "fields": { + "a_number": "1234.5678", + "a_date": "2020-10-19T00:00:00+02:00", + "a_time": "13:17:00", + "a_datetime": "2020-10-19T13:18:00+02:00", + "price": "123.45", + }, + "groups": [str(imp.group.uuid)], + } + ], + batch.specs, + ) + + def test_parse_value(self): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + kgl = ZoneInfo("Africa/Kigali") + + tests = [ + ("", ""), + (" Yes ", "Yes"), + (1234, "1234"), + (123.456, "123.456"), + (date(2020, 9, 18), "2020-09-18"), + (datetime(2020, 9, 18, 15, 45, 30, 0), "2020-09-18T15:45:30+02:00"), + (datetime(2020, 9, 18, 15, 45, 30, 0).replace(tzinfo=kgl), "2020-09-18T15:45:30+02:00"), + ] + for test in tests: + self.assertEqual(test[1], imp._parse_value(test[0], tz=kgl)) + + def test_get_default_group_name(self): + self.create_group("Testers", contacts=[]) + tests = [ + ("simple.xlsx", "Simple"), + ("testers.xlsx", "Testers 2"), # group called Testers already exists + ("contact-imports.xlsx", "Contact Imports"), + ("abc_@@é.xlsx", "Abc É"), + ("a_@@é.xlsx", "Import"), # would be too short + (f"{'x' * 100}.xlsx", "Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"), # truncated + ] + for test in tests: + self.assertEqual(test[1], ContactImport(org=self.org, original_filename=test[0]).get_default_group_name()) + + @mock_mailroom + def test_delete(self, mr_mocks): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + imp.start() + imp.delete() + + self.assertEqual(0, ContactImport.objects.count()) + self.assertEqual(0, ContactImportBatch.objects.count()) diff --git a/temba/contacts/tests/test_importcrudl.py b/temba/contacts/tests/test_importcrudl.py new file mode 100644 index 00000000000..ade8811b080 --- /dev/null +++ b/temba/contacts/tests/test_importcrudl.py @@ -0,0 +1,273 @@ +from unittest.mock import patch + +from django.test.utils import override_settings +from django.urls import reverse + +from temba.contacts.models import ContactField, ContactGroup, ContactImport +from temba.tests import CRUDLTestMixin, TembaTest, mock_mailroom + + +class ContactImportCRUDLTest(TembaTest, CRUDLTestMixin): + def 
test_create_and_preview(self): + create_url = reverse("contacts.contactimport_create") + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=["file"]) + + # try posting with nothing + response = self.client.post(create_url, {}) + self.assertFormError(response.context["form"], "file", "This field is required.") + + # try uploading an empty file + response = self.client.post(create_url, {"file": self.upload("media/test_imports/empty.xlsx")}) + self.assertFormError(response.context["form"], "file", "Import file doesn't contain any records.") + + # try uploading a valid XLSX file + response = self.client.post(create_url, {"file": self.upload("media/test_imports/simple.xlsx")}) + self.assertEqual(302, response.status_code) + + imp = ContactImport.objects.get() + self.assertEqual(self.org, imp.org) + self.assertEqual(3, imp.num_records) + self.assertRegex(imp.file.name, rf"orgs/{self.org.id}/contact_imports/[\w-]{{36}}.xlsx$") + self.assertEqual("simple.xlsx", imp.original_filename) + self.assertIsNone(imp.started_on) + self.assertIsNone(imp.group) + + preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) + read_url = reverse("contacts.contactimport_read", args=[imp.id]) + + # will have been redirected to the preview view for the new import + self.assertEqual(preview_url, response.url) + + response = self.client.get(preview_url) + self.assertContains(response, "URN:Tel") + self.assertContains(response, "name") + + response = self.client.post(preview_url, {}) + self.assertEqual(302, response.status_code) + self.assertEqual(read_url, response.url) + + imp.refresh_from_db() + self.assertIsNotNone(imp.started_on) + + # can no longer access preview URL.. will be redirected to read + response = self.client.get(preview_url) + self.assertEqual(302, response.status_code) + self.assertEqual(read_url, response.url) + + @mock_mailroom + def test_creating_new_group(self, mr_mocks): + self.login(self.admin) + imp = self.create_contact_import("media/test_imports/simple.xlsx") + preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) + read_url = reverse("contacts.contactimport_read", args=[imp.id]) + + # create some groups + self.create_group("Testers", contacts=[]) + doctors = self.create_group("Doctors", contacts=[]) + + # try creating new group but not providing a name + response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": " "}) + self.assertFormError(response.context["form"], "new_group_name", "Required.") + + # try creating new group but providing an invalid name + response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": '"Foo"'}) + self.assertFormError(response.context["form"], "new_group_name", "Invalid group name.") + + # try creating new group but providing a name of an existing group + response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "testERs"}) + self.assertFormError(response.context["form"], "new_group_name", "Already exists.") + + # try creating new group when we've already reached our group limit + with override_settings(ORG_LIMIT_DEFAULTS={"groups": 2}): + response = self.client.post( + preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "Import"} + ) + self.assertFormError(response.context["form"], None, "This workspace has reached its limit of 2 groups.") + + # finally create new group... 
+ response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "N", "new_group_name": "Import"}) + self.assertRedirect(response, read_url) + + new_group = ContactGroup.objects.get(name="Import") + imp.refresh_from_db() + self.assertEqual(new_group, imp.group) + + # existing group should not check for workspace limit + imp = self.create_contact_import("media/test_imports/simple.xlsx") + preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) + read_url = reverse("contacts.contactimport_read", args=[imp.id]) + with override_settings(ORG_LIMIT_DEFAULTS={"groups": 2}): + response = self.client.post( + preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": doctors.id} + ) + self.assertRedirect(response, read_url) + imp.refresh_from_db() + self.assertEqual(doctors, imp.group) + + @mock_mailroom + def test_using_existing_group(self, mr_mocks): + self.login(self.admin) + imp = self.create_contact_import("media/test_imports/simple.xlsx") + preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) + read_url = reverse("contacts.contactimport_read", args=[imp.id]) + + # create some groups + self.create_field("age", "Age", ContactField.TYPE_NUMBER) + testers = self.create_group("Testers", contacts=[]) + doctors = self.create_group("Doctors", contacts=[]) + self.create_group("No Age", query='age = ""') + + # only static groups appear as options + response = self.client.get(preview_url) + self.assertEqual([doctors, testers], list(response.context["form"].fields["existing_group"].queryset)) + + # try submitting without group + response = self.client.post(preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": ""}) + self.assertFormError(response.context["form"], "existing_group", "Required.") + + # finally try with actual group... 
+ response = self.client.post( + preview_url, {"add_to_group": True, "group_mode": "E", "existing_group": doctors.id} + ) + self.assertRedirect(response, read_url) + + imp.refresh_from_db() + self.assertEqual(doctors, imp.group) + + def test_preview_with_mappings(self): + self.create_field("age", "Age", ContactField.TYPE_NUMBER) + + imp = self.create_contact_import("media/test_imports/extra_fields_and_group.xlsx") + preview_url = reverse("contacts.contactimport_preview", args=[imp.id]) + + self.assertRequestDisallowed(preview_url, [None, self.user, self.agent, self.admin2]) + + # columns 4 and 5 are a non-existent field so will have controls to create a new one + self.assertUpdateFetch( + preview_url, + [self.editor, self.admin], + form_fields=[ + "add_to_group", + "group_mode", + "new_group_name", + "existing_group", + "column_5_include", + "column_5_name", + "column_5_value_type", + "column_6_include", + "column_6_name", + "column_6_value_type", + ], + ) + + # if including a new fields, can't use existing field name + response = self.client.post( + preview_url, + { + "column_5_include": True, + "column_5_name": "Goats", + "column_5_value_type": "N", + "column_6_include": True, + "column_6_name": "age", + "column_6_value_type": "N", + "add_to_group": False, + }, + ) + self.assertEqual(1, len(response.context["form"].errors)) + self.assertFormError(response.context["form"], None, "Field name for 'Field:Sheep' matches an existing field.") + + # if including a new fields, can't repeat names + response = self.client.post( + preview_url, + { + "column_5_include": True, + "column_5_name": "Goats", + "column_5_value_type": "N", + "column_6_include": True, + "column_6_name": "goats", + "column_6_value_type": "N", + "add_to_group": False, + }, + ) + self.assertEqual(1, len(response.context["form"].errors)) + self.assertFormError(response.context["form"], None, "Field name 'goats' is repeated.") + + # if including a new field, name can't be invalid + response = self.client.post( + preview_url, + { + "column_5_include": True, + "column_5_name": "Goats", + "column_5_value_type": "N", + "column_6_include": True, + "column_6_name": "#$%^@", + "column_6_value_type": "N", + "add_to_group": False, + }, + ) + self.assertEqual(1, len(response.context["form"].errors)) + self.assertFormError( + response.context["form"], None, "Field name for 'Field:Sheep' is invalid or a reserved word." 
+ ) + + # or empty + response = self.client.post( + preview_url, + { + "column_5_include": True, + "column_5_name": "Goats", + "column_5_value_type": "N", + "column_6_include": True, + "column_6_name": "", + "column_6_value_type": "T", + "add_to_group": False, + }, + ) + self.assertEqual(1, len(response.context["form"].errors)) + self.assertFormError(response.context["form"], None, "Field name for 'Field:Sheep' can't be empty.") + + # unless you're ignoring it + response = self.client.post( + preview_url, + { + "column_5_include": True, + "column_5_name": "Goats", + "column_5_value_type": "N", + "column_6_include": False, + "column_6_name": "", + "column_6_value_type": "T", + "add_to_group": False, + }, + ) + self.assertEqual(302, response.status_code) + + # mappings will have been updated + imp.refresh_from_db() + self.assertEqual( + [ + {"header": "URN:Tel", "mapping": {"type": "scheme", "scheme": "tel"}}, + {"header": "Name", "mapping": {"type": "attribute", "name": "name"}}, + {"header": "language", "mapping": {"type": "attribute", "name": "language"}}, + {"header": "Status", "mapping": {"type": "attribute", "name": "status"}}, + {"header": "Created On", "mapping": {"type": "ignore"}}, + { + "header": "field: goats", + "mapping": {"type": "new_field", "key": "goats", "name": "Goats", "value_type": "N"}, + }, + {"header": "Field:Sheep", "mapping": {"type": "ignore"}}, + {"header": "Group:Testers", "mapping": {"type": "ignore"}}, + ], + imp.mappings, + ) + + @patch("temba.contacts.models.ContactImport.BATCH_SIZE", 2) + def test_read(self): + imp = self.create_contact_import("media/test_imports/simple.xlsx") + imp.start() + + read_url = reverse("contacts.contactimport_read", args=[imp.id]) + + self.assertRequestDisallowed(read_url, [None, self.agent, self.admin2]) + self.assertReadFetch(read_url, [self.user, self.editor, self.admin], context_object=imp) diff --git a/temba/contacts/tests/test_templatetags.py b/temba/contacts/tests/test_templatetags.py new file mode 100644 index 00000000000..97b6df67b4b --- /dev/null +++ b/temba/contacts/tests/test_templatetags.py @@ -0,0 +1,87 @@ +from temba.contacts.models import ContactField +from temba.contacts.templatetags import contacts as tags +from temba.msgs.models import Msg +from temba.tests import TembaTest + + +class ContactsTest(TembaTest): + def test_contact_field(self): + gender = self.create_field("gender", "Gender", ContactField.TYPE_TEXT) + age = self.create_field("age", "Age", ContactField.TYPE_NUMBER) + joined = self.create_field("joined", "Joined", ContactField.TYPE_DATETIME) + last_seen_on = self.org.fields.get(key="last_seen_on") + contact = self.create_contact("Bob", fields={"age": 30, "gender": "M", "joined": "2024-01-01T00:00:00Z"}) + + self.assertEqual("M", tags.contact_field(contact, gender)) + self.assertEqual("30", tags.contact_field(contact, age)) + self.assertEqual( + "", + tags.contact_field(contact, joined), + ) + self.assertEqual("--", tags.contact_field(contact, last_seen_on)) + + def test_name_or_urn(self): + contact1 = self.create_contact("", urns=[]) + contact2 = self.create_contact("Ann", urns=[]) + contact3 = self.create_contact("Bob", urns=["tel:+12024561111", "telegram:098761111"]) + contact4 = self.create_contact("", urns=["tel:+12024562222", "telegram:098762222"]) + + self.assertEqual("", tags.name_or_urn(contact1, self.org)) + self.assertEqual("Ann", tags.name_or_urn(contact2, self.org)) + self.assertEqual("Bob", tags.name_or_urn(contact3, self.org)) + self.assertEqual("(202) 456-2222", 
tags.name_or_urn(contact4, self.org)) + + with self.anonymous(self.org): + self.assertEqual(f"{contact1.id:010}", tags.name_or_urn(contact1, self.org)) + self.assertEqual("Ann", tags.name_or_urn(contact2, self.org)) + self.assertEqual("Bob", tags.name_or_urn(contact3, self.org)) + self.assertEqual(f"{contact4.id:010}", tags.name_or_urn(contact4, self.org)) + + def test_urn_or_anon(self): + contact1 = self.create_contact("Bob", urns=[]) + contact2 = self.create_contact("Uri", urns=["tel:+12024561414", "telegram:098765432"]) + + self.assertEqual("--", tags.urn_or_anon(contact1, self.org)) + self.assertEqual("+1 202-456-1414", tags.urn_or_anon(contact2, self.org)) + + with self.anonymous(self.org): + self.assertEqual(f"{contact1.id:010}", tags.urn_or_anon(contact1, self.org)) + self.assertEqual(f"{contact2.id:010}", tags.urn_or_anon(contact2, self.org)) + + def test_urn_icon(self): + contact = self.create_contact("Uri", urns=["tel:+1234567890", "telegram:098765432", "viber:346376373"]) + tel_urn, tg_urn, viber_urn = contact.urns.order_by("-priority") + + self.assertEqual("icon-phone", tags.urn_icon(tel_urn)) + self.assertEqual("icon-telegram", tags.urn_icon(tg_urn)) + self.assertEqual("", tags.urn_icon(viber_urn)) + + def test_format_urn(self): + contact = self.create_contact("Uri", urns=["tel:+12024561414"]) + + self.assertEqual("+1 202-456-1414", tags.format_urn(contact.get_urn(), self.org)) + + with self.anonymous(self.org): + self.assertEqual("••••••••", tags.format_urn(contact.get_urn(), self.org)) + + def test_msg_status_badge(self): + contact = self.create_contact("Uri", urns=["tel:+12024561414"]) + msg = self.create_outgoing_msg(contact, "This is an outgoing message") + + # wired has a primary color check + msg.status = Msg.STATUS_WIRED + self.assertIn('"check"', tags.msg_status_badge(msg)) + self.assertIn("--color-primary-dark", tags.msg_status_badge(msg)) + + # delivered has a success check + msg.status = Msg.STATUS_DELIVERED + self.assertIn('"check"', tags.msg_status_badge(msg)) + self.assertIn("--success-rgb", tags.msg_status_badge(msg)) + + # errored show retrying icon + msg.status = Msg.STATUS_ERRORED + self.assertIn('"retry"', tags.msg_status_badge(msg)) + + # failed messages show an x + msg.status = Msg.STATUS_FAILED + self.assertIn('"x"', tags.msg_status_badge(msg)) diff --git a/temba/contacts/tests/test_urn.py b/temba/contacts/tests/test_urn.py new file mode 100644 index 00000000000..3fffc107ba5 --- /dev/null +++ b/temba/contacts/tests/test_urn.py @@ -0,0 +1,200 @@ +from django.db.utils import IntegrityError + +from temba.contacts.models import URN, ContactURN +from temba.tests import TembaTest + + +class ContactURNTest(TembaTest): + def setUp(self): + super().setUp() + + def test_get_display(self): + urn = ContactURN.objects.create( + org=self.org, scheme="tel", path="+250788383383", identity="tel:+250788383383", priority=50 + ) + self.assertEqual(urn.get_display(self.org), "0788 383 383") + self.assertEqual(urn.get_display(self.org, formatted=False), "+250788383383") + self.assertEqual(urn.get_display(self.org, international=True), "+250 788 383 383") + self.assertEqual(urn.get_display(self.org, formatted=False, international=True), "+250788383383") + + # friendly tel formatting for whatsapp too + urn = ContactURN.objects.create( + org=self.org, scheme="whatsapp", path="12065551212", identity="whatsapp:12065551212", priority=50 + ) + self.assertEqual(urn.get_display(self.org), "(206) 555-1212") + + # use path for other schemes + urn = ContactURN.objects.create( + 
org=self.org, scheme="twitter", path="billy_bob", identity="twitter:billy_bob", priority=50 + ) + self.assertEqual(urn.get_display(self.org), "billy_bob") + + # unless there's a display property + urn = ContactURN.objects.create( + org=self.org, + scheme="twitter", + path="jimmy_john", + identity="twitter:jimmy_john", + priority=50, + display="JIM", + ) + self.assertEqual(urn.get_display(self.org), "JIM") + + def test_empty_scheme_disallowed(self): + with self.assertRaises(IntegrityError): + ContactURN.objects.create(org=self.org, scheme="", path="1234", identity=":1234") + + def test_empty_path_disallowed(self): + with self.assertRaises(IntegrityError): + ContactURN.objects.create(org=self.org, scheme="ext", path="", identity="ext:") + + def test_identity_mismatch_disallowed(self): + with self.assertRaises(IntegrityError): + ContactURN.objects.create(org=self.org, scheme="ext", path="1234", identity="ext:5678") + + def test_ensure_normalization(self): + contact1 = self.create_contact("Bob", urns=["tel:+250788111111"]) + contact2 = self.create_contact("Jim", urns=["tel:+0788222222"]) + + self.org.normalize_contact_tels() + + self.assertEqual("+250788111111", contact1.urns.get().path) + self.assertEqual("+250788222222", contact2.urns.get().path) + + +class URNTest(TembaTest): + def test_facebook_urn(self): + self.assertTrue(URN.validate("facebook:ref:asdf")) + + def test_instagram_urn(self): + self.assertTrue(URN.validate("instagram:12345678901234567")) + + def test_discord_urn(self): + self.assertEqual("discord:750841288886321253", URN.from_discord("750841288886321253")) + self.assertTrue(URN.validate(URN.from_discord("750841288886321253"))) + self.assertFalse(URN.validate(URN.from_discord("not-a-discord-id"))) + + def test_whatsapp_urn(self): + self.assertTrue(URN.validate("whatsapp:12065551212")) + self.assertFalse(URN.validate("whatsapp:+12065551212")) + + def test_freshchat_urn(self): + self.assertTrue( + URN.validate("freshchat:c0534f78-b6e9-4f79-8853-11cedfc1f35b/c0534f78-b6e9-4f79-8853-11cedfc1f35b") + ) + self.assertFalse(URN.validate("freshchat:+12065551212")) + + def test_from_parts(self): + self.assertEqual(URN.from_parts("deleted", "12345"), "deleted:12345") + self.assertEqual(URN.from_parts("tel", "12345"), "tel:12345") + self.assertEqual(URN.from_parts("tel", "+12345"), "tel:+12345") + self.assertEqual(URN.from_parts("tel", "(917) 992-5253"), "tel:(917) 992-5253") + self.assertEqual(URN.from_parts("mailto", "a_b+c@d.com"), "mailto:a_b+c@d.com") + self.assertEqual(URN.from_parts("twitterid", "2352362611", display="bobby"), "twitterid:2352362611#bobby") + self.assertEqual( + URN.from_parts("twitterid", "2352362611", query="foo=ba?r", display="bobby"), + "twitterid:2352362611?foo=ba%3Fr#bobby", + ) + + self.assertEqual(URN.from_tel("+12345"), "tel:+12345") + + self.assertRaises(ValueError, URN.from_parts, "", "12345") + self.assertRaises(ValueError, URN.from_parts, "tel", "") + self.assertRaises(ValueError, URN.from_parts, "xxx", "12345") + + def test_to_parts(self): + self.assertEqual(URN.to_parts("deleted:12345"), ("deleted", "12345", None, None)) + self.assertEqual(URN.to_parts("tel:12345"), ("tel", "12345", None, None)) + self.assertEqual(URN.to_parts("tel:+12345"), ("tel", "+12345", None, None)) + self.assertEqual(URN.to_parts("twitter:abc_123"), ("twitter", "abc_123", None, None)) + self.assertEqual(URN.to_parts("mailto:a_b+c@d.com"), ("mailto", "a_b+c@d.com", None, None)) + self.assertEqual(URN.to_parts("facebook:12345"), ("facebook", "12345", None, None)) + 
self.assertEqual(URN.to_parts("vk:12345"), ("vk", "12345", None, None))
+        self.assertEqual(URN.to_parts("telegram:12345"), ("telegram", "12345", None, None))
+        self.assertEqual(URN.to_parts("telegram:12345#foobar"), ("telegram", "12345", None, "foobar"))
+        self.assertEqual(URN.to_parts("ext:Aa0()+,-.:=@;$_!*'"), ("ext", "Aa0()+,-.:=@;$_!*'", None, None))
+        self.assertEqual(URN.to_parts("instagram:12345"), ("instagram", "12345", None, None))
+
+        self.assertRaises(ValueError, URN.to_parts, "tel")
+        self.assertRaises(ValueError, URN.to_parts, "tel:")  # missing path
+        self.assertRaises(ValueError, URN.to_parts, ":12345")  # missing scheme
+        self.assertRaises(ValueError, URN.to_parts, "x_y:123")  # invalid scheme
+        self.assertRaises(ValueError, URN.to_parts, "xyz:{abc}")  # invalid path
+
+    def test_normalize(self):
+        # valid tel numbers
+        self.assertEqual(URN.normalize("tel:0788383383", "RW"), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel: +250788383383 ", "KE"), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel:+250788383383", None), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel:250788383383", None), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel:2.50788383383E+11", None), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel:2.50788383383E+12", None), "tel:+250788383383")
+        self.assertEqual(URN.normalize("tel:(917)992-5253", "US"), "tel:+19179925253")
+        self.assertEqual(URN.normalize("tel:19179925253", None), "tel:+19179925253")
+        self.assertEqual(URN.normalize("tel:+62877747666", None), "tel:+62877747666")
+        self.assertEqual(URN.normalize("tel:62877747666", "ID"), "tel:+62877747666")
+        self.assertEqual(URN.normalize("tel:0877747666", "ID"), "tel:+62877747666")
+        self.assertEqual(URN.normalize("tel:07531669965", "GB"), "tel:+447531669965")
+        self.assertEqual(URN.normalize("tel:22658125926", ""), "tel:+22658125926")
+        self.assertEqual(URN.normalize("tel:263780821000", "ZW"), "tel:+263780821000")
+        self.assertEqual(URN.normalize("tel:+2203693333", ""), "tel:+2203693333")
+
+        # un-normalizable tel numbers
+        self.assertEqual(URN.normalize("tel:12345", "RW"), "tel:12345")
+        self.assertEqual(URN.normalize("tel:0788383383", None), "tel:0788383383")
+        self.assertEqual(URN.normalize("tel:0788383383", "ZZ"), "tel:0788383383")
+        self.assertEqual(URN.normalize("tel:MTN", "RW"), "tel:mtn")
+
+        # twitter handles remove @
+        self.assertEqual(URN.normalize("twitter: @jimmyJO"), "twitter:jimmyjo")
+        self.assertEqual(URN.normalize("twitterid:12345#@jimmyJO"), "twitterid:12345#jimmyjo")
+
+        # email addresses
+        self.assertEqual(URN.normalize("mailto: nAme@domAIN.cOm "), "mailto:name@domain.com")
+
+        # external ids are case sensitive
+        self.assertEqual(URN.normalize("ext: eXterNAL123 "), "ext:eXterNAL123")
+
+    def test_validate(self):
+        self.assertFalse(URN.validate("xxxx", None))  # un-parseable URNs don't validate
+
+        # valid tel numbers
+        self.assertTrue(URN.validate("tel:0788383383", "RW"))
+        self.assertTrue(URN.validate("tel:+250788383383", "KE"))
+        self.assertTrue(URN.validate("tel:+23761234567", "CM"))  # old Cameroon format
+        self.assertTrue(URN.validate("tel:+237661234567", "CM"))  # new Cameroon format
+        self.assertTrue(URN.validate("tel:+250788383383", None))
+
+        # invalid tel numbers
+        self.assertFalse(URN.validate("tel:0788383383", "ZZ"))  # invalid country
+        self.assertFalse(URN.validate("tel:0788383383", None))  # no country
+        self.assertFalse(URN.validate("tel:MTN", "RW"))
+        self.assertFalse(URN.validate("tel:5912705", "US"))
+
+        # twitter handles
self.assertTrue(URN.validate("twitter:jimmyjo")) + self.assertTrue(URN.validate("twitter:billy_bob")) + self.assertFalse(URN.validate("twitter:jimmyjo!@")) + self.assertFalse(URN.validate("twitter:billy bob")) + + # twitterid urns + self.assertTrue(URN.validate("twitterid:12345#jimmyjo")) + self.assertTrue(URN.validate("twitterid:12345#1234567")) + self.assertFalse(URN.validate("twitterid:jimmyjo#1234567")) + self.assertFalse(URN.validate("twitterid:123#a.!f")) + + # email addresses + self.assertTrue(URN.validate("mailto:abcd+label@x.y.z.com")) + self.assertFalse(URN.validate("mailto:@@@")) + + # viber urn + self.assertTrue(URN.validate("viber:dKPvqVrLerGrZw15qTuVBQ==")) + + # facebook, telegram, vk and instagram URN paths must be integers + self.assertTrue(URN.validate("telegram:12345678901234567")) + self.assertFalse(URN.validate("telegram:abcdef")) + self.assertTrue(URN.validate("facebook:12345678901234567")) + self.assertFalse(URN.validate("facebook:abcdef")) + self.assertTrue(URN.validate("vk:12345678901234567")) + self.assertTrue(URN.validate("instagram:12345678901234567")) + self.assertFalse(URN.validate("instagram:abcdef")) diff --git a/temba/contacts/views.py b/temba/contacts/views.py index 5c6e3a36692..13a061df1ec 100644 --- a/temba/contacts/views.py +++ b/temba/contacts/views.py @@ -4,15 +4,7 @@ from urllib.parse import quote_plus import iso8601 -from smartmin.views import ( - SmartCreateView, - SmartCRUDL, - SmartListView, - SmartReadView, - SmartTemplateView, - SmartUpdateView, - SmartView, -) +from smartmin.views import SmartCreateView, SmartCRUDL, SmartListView, SmartReadView, SmartUpdateView, SmartView from django import forms from django.conf import settings @@ -21,7 +13,8 @@ from django.core.validators import FileExtensionValidator from django.db import transaction from django.db.models.functions import Upper -from django.http import Http404, HttpResponse, HttpResponseNotFound, HttpResponseRedirect, JsonResponse +from django.http import HttpResponse, HttpResponseNotFound, HttpResponseRedirect, JsonResponse +from django.shortcuts import get_object_or_404 from django.urls import reverse from django.utils import timezone from django.utils.functional import cached_property @@ -34,25 +27,26 @@ from temba.mailroom.events import Event from temba.notifications.views import NotificationTargetMixin from temba.orgs.models import User -from temba.orgs.views import ( - BaseExportView, - DependencyDeleteModal, - DependencyUsagesModal, - MenuMixin, - ModalMixin, - OrgObjPermsMixin, - OrgPermsMixin, +from temba.orgs.views.base import ( + BaseDependencyDeleteModal, + BaseExportModal, + BaseListView, + BaseMenuView, + BaseReadView, + BaseUpdateModal, + BaseUsagesModal, ) +from temba.orgs.views.mixins import BulkActionMixin, OrgObjPermsMixin, OrgPermsMixin from temba.tickets.models import Ticket, Topic from temba.utils import json, on_transaction_commit from temba.utils.dates import datetime_to_timestamp, timestamp_to_datetime from temba.utils.fields import CheckboxWidget, InputWidget, SelectWidget, TembaChoiceField from temba.utils.models import patch_queryset_count from temba.utils.models.es import IDSliceQuerySet -from temba.utils.views import BulkActionMixin, ComponentFormMixin, ContentMenuMixin, NonAtomicMixin, SpaMixin +from temba.utils.views.mixins import ComponentFormMixin, ContextMenuMixin, ModalFormMixin, NonAtomicMixin, SpaMixin from .forms import ContactGroupForm, CreateContactForm, UpdateContactForm -from .models import URN, Contact, ContactExport, ContactField, ContactGroup, 
ContactGroupCount, ContactImport +from .models import URN, Contact, ContactExport, ContactField, ContactGroup, ContactImport from .omnibox import omnibox_query, omnibox_serialize logger = logging.getLogger(__name__) @@ -83,6 +77,7 @@ class ContactListView(SpaMixin, OrgPermsMixin, BulkActionMixin, SmartListView): sort_field = None sort_direction = None + search_fields = ("name",) # so that search box is displayed search_error = None def pre_process(self, request, *args, **kwargs): @@ -105,13 +100,6 @@ def derive_export_url(self): search = quote_plus(self.request.GET.get("search", "")) return f"{reverse('contacts.contact_export')}?g={self.group.uuid}&s={search}" - def derive_refresh(self): - # smart groups that are reevaluating should refresh every 2 seconds - if self.group.is_smart and self.group.status != ContactGroup.STATUS_READY: - return 200000 - - return None - def get_queryset(self, **kwargs): org = self.request.org self.search_error = None @@ -155,22 +143,19 @@ def get_queryset(self, **kwargs): patch_queryset_count(qs, self.group.get_member_count) return qs - def get_bulk_action_labels(self): - return ContactGroup.get_groups(self.request.org, manual_only=True) - def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) - org = self.request.org - # resolve the paginated object list so we can initialize a cache of URNs - contacts = context["object_list"] - Contact.bulk_urn_cache_initialize(contacts) + # prefetch contact URNs + Contact.bulk_urn_cache_initialize(context["object_list"]) - context["contacts"] = contacts - context["has_contacts"] = contacts or org.get_contact_count() > 0 - context["search_error"] = self.search_error + # get the first 6 featured fields as well as the last seen and created fields + featured_fields = ContactField.get_fields(org, featured=True).order_by("-priority", "id")[0:6] + proxy_fields = org.fields.filter(key__in=("last_seen_on", "created_on"), is_proxy=True).order_by("-key") + context["contact_fields"] = list(featured_fields) + list(proxy_fields) + context["search_error"] = self.search_error context["sort_direction"] = self.sort_direction context["sort_field"] = self.sort_field @@ -193,7 +178,7 @@ class ContactCRUDL(SmartCRUDL): "list", "menu", "read", - "filter", + "group", "blocked", "omnibox", "open_ticket", @@ -204,7 +189,7 @@ class ContactCRUDL(SmartCRUDL): "history", ) - class Menu(MenuMixin, OrgPermsMixin, SmartTemplateView): + class Menu(BaseMenuView): def render_to_response(self, context, **response_kwargs): org = self.request.org counts = Contact.get_status_counts(org) @@ -265,7 +250,7 @@ def render_to_response(self, context, **response_kwargs): .select_related("org") .order_by("-group_type", Upper("name")) ) - group_counts = ContactGroupCount.get_totals(groups) + group_counts = ContactGroup.get_member_counts(groups) group_items = [] for g in groups: @@ -275,7 +260,7 @@ def render_to_response(self, context, **response_kwargs): name=g.name, icon=g.icon, count=group_counts[g], - href=reverse("contacts.contact_filter", args=[g.uuid]), + href=reverse("contacts.contact_group", args=[g.uuid]), ) ) @@ -286,7 +271,7 @@ def render_to_response(self, context, **response_kwargs): return JsonResponse({"results": menu}) - class Export(BaseExportView): + class Export(BaseExportModal): export_type = ContactExport success_url = "@contacts.contact_list" size_limit = 1_000_000 @@ -327,7 +312,7 @@ def render_to_response(self, context, **response_kwargs): return JsonResponse({"results": results, "more": False, "total": len(results), "err": 
"nil"}) - class Read(SpaMixin, OrgObjPermsMixin, ContentMenuMixin, SmartReadView): + class Read(SpaMixin, ContextMenuMixin, BaseReadView): slug_url_kwarg = "uuid" fields = ("name",) select_related = ("current_flow",) @@ -338,10 +323,7 @@ def derive_menu_path(self): def derive_title(self): return self.object.get_display() - def get_queryset(self): - return Contact.objects.filter(is_active=True) - - def build_content_menu(self, menu): + def build_context_menu(self, menu): obj = self.get_object() if self.has_org_perm("contacts.contact_update"): @@ -355,7 +337,7 @@ def build_content_menu(self, menu): ) if obj.status == Contact.STATUS_ACTIVE: - if self.has_org_perm("flows.flow_start"): + if not obj.current_flow and self.has_org_perm("flows.flow_start"): menu.add_modax( _("Start Flow"), "start-flow", @@ -367,10 +349,8 @@ def build_content_menu(self, menu): menu.add_modax( _("Open Ticket"), "open-ticket", reverse("contacts.contact_open_ticket", args=[obj.id]) ) - if self.has_org_perm("contacts.contact_interrupt") and obj.current_flow: - menu.add_url_post(_("Interrupt"), reverse("contacts.contact_interrupt", args=(obj.id,))) - class Scheduled(OrgObjPermsMixin, SmartReadView): + class Scheduled(BaseReadView): """ Merged list of upcoming scheduled events (campaign event fires and scheduled broadcasts) """ @@ -378,21 +358,15 @@ class Scheduled(OrgObjPermsMixin, SmartReadView): permission = "contacts.contact_read" slug_url_kwarg = "uuid" - def get_queryset(self): - return Contact.objects.filter(is_active=True).select_related("org") - def render_to_response(self, context, **response_kwargs): return JsonResponse({"results": self.object.get_scheduled()}) - class History(OrgObjPermsMixin, SmartReadView): + class History(BaseReadView): slug_url_kwarg = "uuid" - def get_queryset(self): - return Contact.objects.filter(is_active=True).select_related("org") - def get_context_data(self, *args, **kwargs): context = super().get_context_data(*args, **kwargs) - contact = self.get_object() + contact = self.object # since we create messages with timestamps from external systems, always a chance a contact's initial # message has a timestamp slightly earlier than the contact itself. 
@@ -513,7 +487,7 @@ def get(self, request, *args, **kwargs):
             }
             return JsonResponse(summary)
 
-    class List(ContentMenuMixin, ContactListView):
+    class List(ContextMenuMixin, ContactListView):
         title = _("Active")
         system_group = ContactGroup.TYPE_DB_ACTIVE
         menu_path = "/contact/active"
@@ -526,7 +500,7 @@ def get_bulk_actions(self):
                 actions += ("start-flow",)
             return actions
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             search = self.request.GET.get("search")
 
             # define save search conditions
@@ -559,21 +533,14 @@ def build_content_menu(self, menu):
             if self.has_org_perm("contacts.contact_export"):
                 menu.add_modax(_("Export"), "export-contacts", self.derive_export_url(), title=_("Export Contacts"))
 
-        def get_context_data(self, *args, **kwargs):
-            context = super().get_context_data(*args, **kwargs)
-            org = self.request.org
-
-            context["contact_fields"] = ContactField.get_fields(org).order_by("-show_in_table", "-priority", "id")[0:6]
-            return context
-
-    class Blocked(ContentMenuMixin, ContactListView):
+    class Blocked(ContextMenuMixin, ContactListView):
         title = _("Blocked")
         system_group = ContactGroup.TYPE_DB_BLOCKED
 
         def get_bulk_actions(self):
             return ("restore", "archive") if self.has_org_perm("contacts.contact_update") else ()
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             if self.has_org_perm("contacts.contact_export"):
                 menu.add_modax(_("Export"), "export-contacts", self.derive_export_url(), title=_("Export Contacts"))
@@ -582,7 +549,7 @@
         def get_context_data(self, *args, **kwargs):
             context = super().get_context_data(*args, **kwargs)
             context["reply_disabled"] = True
             return context
 
-    class Stopped(ContentMenuMixin, ContactListView):
+    class Stopped(ContextMenuMixin, ContactListView):
         title = _("Stopped")
         template_name = "contacts/contact_stopped.html"
         system_group = ContactGroup.TYPE_DB_STOPPED
@@ -590,7 +557,7 @@ class Stopped(ContentMenuMixin, ContactListView):
         def get_bulk_actions(self):
             return ("restore", "archive") if self.has_org_perm("contacts.contact_update") else ()
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             if self.has_org_perm("contacts.contact_export"):
                 menu.add_modax(_("Export"), "export-contacts", self.derive_export_url(), title=_("Export Contacts"))
@@ -599,7 +566,7 @@
         def get_context_data(self, *args, **kwargs):
             context = super().get_context_data(*args, **kwargs)
             context["reply_disabled"] = True
             return context
 
-    class Archived(ContentMenuMixin, ContactListView):
+    class Archived(ContextMenuMixin, ContactListView):
         title = _("Archived")
         template_name = "contacts/contact_archived.html"
         system_group = ContactGroup.TYPE_DB_ARCHIVED
@@ -618,17 +585,17 @@ def get_context_data(self, *args, **kwargs):
             context["reply_disabled"] = True
             return context
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             if self.has_org_perm("contacts.contact_export"):
                 menu.add_modax(_("Export"), "export-contacts", self.derive_export_url(), title=_("Export Contacts"))
 
             if self.has_org_perm("contacts.contact_delete"):
                 menu.add_js("contacts_delete_all", _("Delete All"))
 
-    class Filter(OrgObjPermsMixin, ContentMenuMixin, ContactListView):
-        template_name = "contacts/contact_filter.html"
+    class Group(OrgObjPermsMixin, ContextMenuMixin, ContactListView):
+        template_name = "contacts/contact_group.html"
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             if not self.group.is_system and self.has_org_perm("contacts.contactgroup_update"):
                 menu.add_modax(_("Edit"), "edit-group", reverse("contacts.contactgroup_update", args=[self.group.id]))
@@ -654,15 +621,15 @@ def get_bulk_actions(self):
 
         def get_context_data(self, *args, **kwargs):
             context = super().get_context_data(*args, **kwargs)
-            org = self.request.org
-
             context["current_group"] = self.group
-            context["contact_fields"] = ContactField.get_fields(org).order_by("-priority", "id")
             return context
 
         @classmethod
         def derive_url_pattern(cls, path, action):
-            return r"^%s/%s/(?P<group>[^/]+)/$" % (path, action)
+            return r"^%s/%s/(?P<uuid>[^/]+)/$" % (path, action)
+
+        def derive_menu_path(self):
+            return self.kwargs["uuid"]
 
         def get_object_org(self):
             return self.group.org
@@ -671,16 +638,15 @@ def derive_title(self):
             return self.group.name
 
         def derive_group(self):
-            try:
-                return ContactGroup.objects.get(
-                    is_active=True,
+            return get_object_or_404(
+                ContactGroup.objects.filter(
+                    uuid=self.kwargs["uuid"],
                     group_type__in=(ContactGroup.TYPE_MANUAL, ContactGroup.TYPE_SMART),
-                    uuid=self.kwargs["group"],
+                    is_active=True,
                 )
-            except ContactGroup.DoesNotExist:
-                raise Http404("Group not found")
+            )
 
-    class Create(NonAtomicMixin, ModalMixin, OrgPermsMixin, SmartCreateView):
+    class Create(NonAtomicMixin, ModalFormMixin, OrgPermsMixin, SmartCreateView):
         form_class = CreateContactForm
         submit_button_name = _("Create")
@@ -712,7 +678,7 @@ def form_valid(self, form):
 
             return self.render_modal_response(form)
 
-    class Update(SpaMixin, ComponentFormMixin, NonAtomicMixin, ModalMixin, OrgObjPermsMixin, SmartUpdateView):
+    class Update(ComponentFormMixin, NonAtomicMixin, ModalFormMixin, OrgObjPermsMixin, SmartUpdateView):
         form_class = UpdateContactForm
         success_url = "hide"
@@ -790,7 +756,7 @@ def form_valid(self, form):
 
             return self.render_modal_response(form)
 
-    class OpenTicket(ComponentFormMixin, ModalMixin, OrgObjPermsMixin, SmartUpdateView):
+    class OpenTicket(ComponentFormMixin, ModalFormMixin, OrgObjPermsMixin, SmartUpdateView):
         """
         Opens a new ticket for this contact.
         """
@@ -835,18 +801,21 @@ def save(self, obj):
 
         def get_success_url(self):
             return f"{reverse('tickets.ticket_list')}all/open/{self.ticket.uuid}/"
 
-    class Interrupt(OrgObjPermsMixin, SmartUpdateView):
+    class Interrupt(ModalFormMixin, OrgObjPermsMixin, SmartUpdateView):
         """
         Interrupt this contact
         """
 
+        slug_url_kwarg = "uuid"
         fields = ()
+        success_url = "hide"
+        submit_button_name = _("Interrupt")
 
         def save(self, obj):
             obj.interrupt(self.request.user)
             return obj
 
-    class Delete(ModalMixin, OrgObjPermsMixin, SmartUpdateView):
+    class Delete(ModalFormMixin, OrgObjPermsMixin, SmartUpdateView):
         """
         Delete this contact (can't be undone)
         """
@@ -862,33 +831,12 @@ def save(self, obj):
 
 
 class ContactGroupCRUDL(SmartCRUDL):
     model = ContactGroup
-    actions = ("create", "update", "usages", "delete", "menu")
-
-    class Menu(MenuMixin, OrgPermsMixin, SmartTemplateView):  # pragma: no cover
-        def derive_menu(self):
-            org = self.request.org
-
-            # order groups with smart (group_type=Q) before manual (group_type=M)
-            all_groups = ContactGroup.get_groups(org).order_by("-group_type", Upper("name"))
-            group_counts = ContactGroupCount.get_totals(all_groups)
-
-            menu = []
-            for g in all_groups:
-                menu.append(
-                    self.create_menu_item(
-                        menu_id=g.uuid,
-                        name=g.name,
-                        icon="loader" if g.status != ContactGroup.STATUS_READY else "atom" if g.query else "",
-                        count=group_counts[g],
-                        href=reverse("contacts.contact_filter", args=[g.uuid]),
-                    )
-                )
-            return menu
+    actions = ("create", "update", "usages", "delete")
 
-    class Create(ComponentFormMixin, ModalMixin, OrgPermsMixin, SmartCreateView):
+    class Create(ComponentFormMixin, ModalFormMixin, OrgPermsMixin, SmartCreateView):
         form_class = ContactGroupForm
         fields = ("name", "preselected_contacts", "group_query")
-        success_url = "uuid@contacts.contact_filter"
+        success_url = "uuid@contacts.contact_group"
         submit_button_name = _("Create")
 
         def save(self, obj):
@@ -919,39 +867,32 @@ def get_form_kwargs(self):
             kwargs["org"] = self.request.org
             return kwargs
 
-    class Update(ComponentFormMixin, ModalMixin, OrgObjPermsMixin, SmartUpdateView):
+    class Update(BaseUpdateModal):
         form_class = ContactGroupForm
-        fields = ("name",)
-        success_url = "uuid@contacts.contact_filter"
-
-        def get_queryset(self):
-            return super().get_queryset().filter(is_system=False)
+        success_url = "uuid@contacts.contact_group"
 
         def derive_fields(self):
-            return ("name", "query") if self.get_object().is_smart else ("name",)
-
-        def get_form_kwargs(self):
-            kwargs = super().get_form_kwargs()
-            kwargs["org"] = self.request.org
-            return kwargs
+            return ("name", "query") if self.object.is_smart else ("name",)
 
-        def form_valid(self, form):
-            self.prev_query = self.get_object().query
+        def pre_save(self, obj):
+            obj._prev_query = self.get_object().query
 
-            return super().form_valid(form)
+            return super().pre_save(obj)
 
         def post_save(self, obj):
             obj = super().post_save(obj)
 
-            if obj.query and obj.query != self.prev_query:
+            # if query actually changed, update it
+            if obj.query and obj.query != obj._prev_query:
                 obj.update_query(obj.query)
+
             return obj
 
-    class Usages(DependencyUsagesModal):
+    class Usages(BaseUsagesModal):
         permission = "contacts.contactgroup_read"
 
-    class Delete(DependencyDeleteModal):
-        cancel_url = "uuid@contacts.contact_filter"
+    class Delete(BaseDependencyDeleteModal):
+        cancel_url = "uuid@contacts.contact_group"
         success_url = "@contacts.contact_list"
@@ -1040,7 +981,7 @@ class ContactFieldCRUDL(SmartCRUDL):
     model = ContactField
     actions = ("list", "create", "update", "update_priority", "delete", "usages")
 
-    class Create(ModalMixin, OrgPermsMixin, SmartCreateView):
+    class Create(ModalFormMixin, OrgPermsMixin, SmartCreateView):
         class Form(ContactFieldForm):
             def clean(self):
                 super().clean()
@@ -1083,7 +1024,7 @@ def form_valid(self, form):
             )
             return self.render_modal_response(form)
 
-    class Update(FieldLookupMixin, ModalMixin, OrgObjPermsMixin, SmartUpdateView):
+    class Update(FieldLookupMixin, ModalFormMixin, OrgObjPermsMixin, SmartUpdateView):
         queryset = ContactField.objects.filter(is_system=False)
         form_class = ContactFieldForm
         submit_button_name = _("Update")
@@ -1106,7 +1047,7 @@ def form_valid(self, form):
             super().form_valid(form)
             return self.render_modal_response(form)
 
-    class Delete(FieldLookupMixin, DependencyDeleteModal):
+    class Delete(FieldLookupMixin, BaseDependencyDeleteModal):
         cancel_url = "@contacts.contactfield_list"
         success_url = "hide"
@@ -1127,25 +1068,26 @@ def post(self, request, *args, **kwargs):
 
             return HttpResponse(json.dumps(payload), status=400, content_type="application/json")
 
-    class List(ContentMenuMixin, SpaMixin, OrgPermsMixin, SmartListView):
+    class List(SpaMixin, ContextMenuMixin, BaseListView):
         menu_path = "/contact/fields"
         title = _("Fields")
         default_order = "name"
 
-        def build_content_menu(self, menu):
+        def build_context_menu(self, menu):
             if self.has_org_perm("contacts.contactfield_create"):
                 menu.add_modax(
-                    _("New Field"),
+                    _("New"),
                     "new-field",
                     f"{reverse('contacts.contactfield_create')}",
+                    title=_("New Field"),
                     on_submit="handleFieldUpdated()",
                     as_button=True,
                 )
 
-        def get_queryset(self, **kwargs):
-            return super().get_queryset(**kwargs).filter(org=self.request.org, is_active=True, is_system=False)
+        def derive_queryset(self, **kwargs):
+            return super().derive_queryset(**kwargs).filter(is_proxy=False)
 
-    class Usages(FieldLookupMixin, DependencyUsagesModal):
+    class Usages(FieldLookupMixin, BaseUsagesModal):
         permission = "contacts.contactfield_read"
         queryset = ContactField.user_fields
@@ -1407,6 +1349,7 @@ def post_save(self, obj):
 
     class Read(SpaMixin, OrgObjPermsMixin, NotificationTargetMixin, SmartReadView):
         menu_path = "/contact/import"
+        title = _("Contact Import")
 
         def get_notification_scope(self) -> tuple:
             return "import:finished", f"contact:{self.object.id}"
diff --git a/temba/dashboard/tests.py b/temba/dashboard/tests.py
index ca78178b5e2..d20b144f9a4 100644
--- a/temba/dashboard/tests.py
+++ b/temba/dashboard/tests.py
@@ -86,8 +86,8 @@ def test_range_details(self):
         self.login(self.admin)
         self.create_activity()
 
-        types = ["T", "TWT", "FB", "NX", "AT", "KN"]
-        michael = self.create_contact("Michael", urns=["twitter:mjackson"])
+        types = ["T", "IG", "FBA", "NX", "AT", "KN"]
+        michael = self.create_contact("Michael", urns=["facebook:mjackson"])
         for t in types:
             channel = self.create_channel(t, f"Test Channel {t}", f"{t}:1234")
             self.create_outgoing_msg(michael, f"Message on {t}", channel=channel)
diff --git a/temba/dashboard/views.py b/temba/dashboard/views.py
index 80eae185a5a..9663fc0f38f 100644
--- a/temba/dashboard/views.py
+++ b/temba/dashboard/views.py
@@ -10,8 +10,8 @@
 from temba.channels.models import Channel, ChannelCount
 from temba.orgs.models import Org
-from temba.orgs.views import OrgPermsMixin
-from temba.utils.views import SpaMixin
+from temba.orgs.views.mixins import OrgPermsMixin
+from temba.utils.views.mixins import SpaMixin
 
 flattened_colors = [
     "#335c81",
diff --git a/temba/flows/__init__.py b/temba/flows/__init__.py
index 09c4bf668ec..e69de29bb2d 100644
--- a/temba/flows/__init__.py
+++ b/temba/flows/__init__.py
@@ -1 +0,0 @@
-from .checks import *  # noqa
diff --git a/temba/flows/checks.py b/temba/flows/checks.py
deleted file mode 100644
index 85e0a6169b5..00000000000
--- a/temba/flows/checks.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from django.conf import settings
-from django.core.checks import Warning, register
-
-
-@register()
-def mailroom_url(app_configs, **kwargs):
-    if not settings.MAILROOM_URL:
-        return [
-            Warning(
-                "No mailroom URL set, simulation will not be available",
-                hint="Set MAILROOM_URL in your Django settings.",
-            )
-        ]
-    return []
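One pattern from the views changes above worth calling out: derive_group now wraps a filtered queryset in get_object_or_404 instead of a try/except. The two forms are equivalent because get_object_or_404 accepts any queryset, calls .get() on it, and converts DoesNotExist into Http404. An illustrative sketch (not code from this diff; uuid is a stand-in variable):

    from django.http import Http404
    from django.shortcuts import get_object_or_404

    from temba.contacts.models import ContactGroup

    # before: explicit exception translation
    try:
        group = ContactGroup.objects.get(uuid=uuid, is_active=True)
    except ContactGroup.DoesNotExist:
        raise Http404("Group not found")

    # after: get_object_or_404 raises Http404 itself, so the view stays declarative
    group = get_object_or_404(ContactGroup.objects.filter(uuid=uuid, is_active=True))

That is also why the Http404 import could be dropped from the module's django.http import line.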
diff --git a/temba/flows/legacy/tests.py b/temba/flows/legacy/tests.py
index 9350fb45ea4..803975f4f66 100644
--- a/temba/flows/legacy/tests.py
+++ b/temba/flows/legacy/tests.py
@@ -88,6 +88,12 @@ def test_migrate_v7_template(self):
 
 
 class FlowMigrationTest(TembaTest):
+    def load_flow(self, filename: str, substitutions=None, name=None):
+        return self.get_flow(f"legacy/migrations/{filename}", substitutions=substitutions, name=name)
+
+    def load_flow_def(self, filename: str, substitutions=None):
+        return self.load_json(f"test_flows/legacy/migrations/{filename}.json", substitutions=substitutions)["flows"][0]
+
     def migrate_flow(self, flow, to_version=None):
         if not to_version:
             to_version = Flow.FINAL_LEGACY_VERSION
@@ -120,7 +126,7 @@ def test_migrate_malformed_single_message_flow(self):
             version_number="3",
         )
 
-        flow_json = self.get_flow_json("malformed_single_message")["definition"]
+        flow_json = self.load_flow_def("malformed_single_message")["definition"]
 
         FlowRevision.objects.create(flow=flow, definition=flow_json, spec_version=3, revision=1, created_by=self.admin)
@@ -132,7 +138,7 @@
         self.assertEqual(2, flow_json["revision"])
 
     def test_migrate_to_11_12(self):
-        flow = self.get_flow("favorites")
+        flow = self.load_flow("favorites")
         definition = {
             "entry": "79b4776b-a995-475d-ae06-1cab9af8a28e",
             "rule_sets": [],
@@ -225,23 +231,23 @@
         # removed the invalid reference
         self.assertEqual(len(migrated["action_sets"]), 2)
 
-        flow = self.get_flow("migrate_to_11_12")
-        flow_json = self.get_flow_json("migrate_to_11_12")
+        flow = self.load_flow("migrate_to_11_12")
+        flow_json = self.load_flow_def("migrate_to_11_12")
 
         migrated = migrate_to_version_11_12(flow_json, flow)
         self.assertEqual(migrated["action_sets"][0]["actions"][0]["msg"]["base"], "Hey there, Yes or No?")
         self.assertEqual(len(migrated["action_sets"]), 3)
 
     def test_migrate_to_11_12_with_one_node(self):
-        flow = self.get_flow("migrate_to_11_12_one_node")
-        flow_json = self.get_flow_json("migrate_to_11_12_one_node")
+        flow = self.load_flow("migrate_to_11_12_one_node")
+        flow_json = self.load_flow_def("migrate_to_11_12_one_node")
 
         migrated = migrate_to_version_11_12(flow_json, flow)
         self.assertEqual(len(migrated["action_sets"]), 0)
 
     def test_migrate_to_11_12_other_org_existing_flow(self):
-        flow = self.get_flow("migrate_to_11_12_other_org", {"CHANNEL-UUID": str(self.channel.uuid)})
-        flow_json = self.get_flow_json("migrate_to_11_12_other_org", {"CHANNEL-UUID": str(self.channel.uuid)})
+        flow = self.load_flow("migrate_to_11_12_other_org", {"CHANNEL-UUID": str(self.channel.uuid)})
+        flow_json = self.load_flow_def("migrate_to_11_12_other_org", {"CHANNEL-UUID": str(self.channel.uuid)})
 
         # change ownership of the channel it's referencing
         self.channel.org = self.org2
@@ -256,14 +262,14 @@ def test_migrate_to_11_12_channel_dependencies(self):
         self.channel.name = "1234"
         self.channel.save()
 
-        self.get_flow("migrate_to_11_12_one_node")
+        self.load_flow("migrate_to_11_12_one_node")
 
         flow = Flow.objects.filter(name="channel").first()
         self.assertEqual(flow.channel_dependencies.count(), 1)
 
     def test_migrate_to_11_11(self):
-        flow = self.get_flow("migrate_to_11_11")
-        flow_json = self.get_flow_json("migrate_to_11_11")
+        flow = self.load_flow("migrate_to_11_11")
+        flow_json = self.load_flow_def("migrate_to_11_11")
 
         migrated = migrate_to_version_11_11(flow_json, flow)
         migrated_labels = get_labels(migrated)
@@ -271,7 +277,7 @@
         self.assertTrue(Label.objects.filter(uuid=uuid, name=name).exists(), msg="Label UUID mismatch")
 
     def test_migrate_to_11_10(self):
-        import_def = self.get_import_json("migrate_to_11_10")
+        import_def = self.load_json("test_flows/legacy/migrations/migrate_to_11_10.json")
 
         migrated_import = migrate_export_to_version_11_10(import_def, self.org)
         migrated = migrated_import["flows"][1]
@@ -322,14 +328,14 @@
         )
 
     def test_migrate_to_11_9(self):
-        flow = self.get_flow("migrate_to_11_9", name="Master")
+        flow = self.load_flow("migrate_to_11_9", name="Master")
 
         # give our flows same UUIDs as in import and make 2 of them invalid
         Flow.objects.filter(name="Valid1").update(uuid="b823cc3b-aaa6-4cd1-b7a5-28d6b492cfa3")
         Flow.objects.filter(name="Invalid1").update(uuid="ad40071e-a665-4df3-af14-0bc0fe589244", is_archived=True)
         Flow.objects.filter(name="Invalid2").update(uuid="136cdab3-e9d1-458c-b6eb-766afd92b478", is_active=False)
 
-        import_def = self.get_import_json("migrate_to_11_9")
+        import_def = self.load_json("test_flows/legacy/migrations/migrate_to_11_9.json")
         flow_def = import_def["flows"][-1]
 
         self.assertEqual(len(flow_def["rule_sets"]), 4)
@@ -349,7 +355,7 @@ def get_rule_uuids(f):
                     uuids.append(rule["uuid"])
             return uuids
 
-        original = self.get_flow_json("migrate_to_11_8")
+        original = self.load_flow_def("migrate_to_11_8")
         original_uuids = get_rule_uuids(original)
 
         self.assertEqual(len(original_uuids), 9)
@@ -363,7 +369,7 @@
         self.assertEqual(len(set(migrated_uuids).difference(original_uuids)), 2)
 
     def test_migrate_to_11_7(self):
-        original = self.get_flow_json("migrate_to_11_7")
+        original = self.load_flow_def("migrate_to_11_7")
 
         self.assertEqual(len(original["action_sets"]), 5)
         self.assertEqual(len(original["rule_sets"]), 1)
@@ -374,8 +380,8 @@
         self.assertEqual(len(migrated["rule_sets"]), 6)
 
     def test_migrate_to_11_6(self):
-        flow = self.get_flow("migrate_to_11_6")
-        flow_json = self.get_flow_json("migrate_to_11_6")
+        flow = self.load_flow("migrate_to_11_6")
+        flow_json = self.load_flow_def("migrate_to_11_6")
 
         migrated = migrate_to_version_11_6(flow_json, flow)
         migrated_groups = get_legacy_groups(migrated)
@@ -383,7 +389,7 @@
         self.assertTrue(ContactGroup.objects.filter(uuid=uuid, name=name).exists(), msg="Group UUID mismatch")
 
     def test_migrate_to_11_5(self):
-        flow_json = self.get_flow_json("migrate_to_11_5")
+        flow_json = self.load_flow_def("migrate_to_11_5")
         flow_json = migrate_to_version_11_5(flow_json)
 
         # check text was updated in the reply action
@@ -436,7 +442,7 @@
 
     @mock_mailroom
     def test_migrate_to_11_4(self, mr_mocks):
-        flow_json = self.get_flow_json("migrate_to_11_4")
+        flow_json = self.load_flow_def("migrate_to_11_4")
 
         migrated = migrate_to_version_11_4(flow_json.copy())
 
         # gather up replies to check expressions were migrated
@@ -463,7 +469,7 @@
         self.assertEqual("", migrated["action_sets"][0]["actions"][0]["msg"]["eng"])
 
     def test_migrate_to_11_3(self):
-        flow_json = self.get_flow_json("migrate_to_11_3")
+        flow_json = self.load_flow_def("migrate_to_11_3")
 
         migrated = migrate_to_version_11_3(flow_json)
@@ -643,8 +649,8 @@ def test_migrate_to_11_0(self):
         self.create_field("district", "District", ContactField.TYPE_DISTRICT)
         self.create_field("joined_on", "Joined On", ContactField.TYPE_DATETIME)
 
-        flow = self.get_flow("type_flow")
-        flow_def = self.get_flow_json("type_flow")
+        flow = self.load_flow("type_flow")
+        flow_def = self.load_flow_def("type_flow")
 
         migrated = migrate_to_version_11_0(flow_def, flow)
 
         # gather up replies to check expressions were migrated
@@ -672,7 +678,7 @@
         )
 
     def test_migrate_to_11_0_with_null_ruleset_label(self):
-        flow = self.get_flow("migrate_to_11_0")
+        flow = self.load_flow("migrate_to_11_0")
         definition = {
             "rule_sets": [
                 {
@@ -693,7 +699,7 @@
         self.assertEqual(migrated, definition)
 
     def test_migrate_to_11_0_with_null_msg_text(self):
-        flow = self.get_flow("migrate_to_11_0")
+        flow = self.load_flow("migrate_to_11_0")
         definition = {
             "action_sets": [
                 {
@@ -710,8 +716,8 @@
         self.assertEqual(migrated, definition)
 
     def test_migrate_to_11_0_with_broken_localization(self):
-        flow = self.get_flow("migrate_to_11_0")
-        flow_def = self.get_flow_json("migrate_to_11_0")
+        flow = self.load_flow("migrate_to_11_0")
+        flow_def = self.load_flow_def("migrate_to_11_0")
 
         migrated = migrate_to_version_11_0(flow_def, flow)
         self.assertEqual(
@@ -741,7 +747,7 @@ def test_migrate_to_10_4(self):
             self.assertIsNotNone(action["uuid"])
 
     def test_migrate_to_10_3(self):
-        flow_def = self.get_flow_json("favorites")
+        flow_def = self.load_flow_def("favorites")
 
         migrated = migrate_to_version_10_3(flow_def, flow=None)
 
         # make sure all of our action sets have an exit uuid
@@ -749,13 +755,13 @@
             self.assertIsNotNone(actionset.get("exit_uuid"))
 
     def test_migrate_to_10_2(self):
-        flow_def = self.get_flow_json("single_message_bad_localization")
+        flow_def = self.load_flow_def("single_message_bad_localization")
 
         migrated = migrate_to_version_10_2(flow_def)
         self.assertEqual("Campaign Message 12", migrated["action_sets"][0]["actions"][0]["msg"]["eng"])
 
     def test_migrate_to_10_1(self):
-        flow_def = self.get_flow_json("favorites")
+        flow_def = self.load_flow_def("favorites")
 
         migrated = migrate_to_version_10_1(flow_def, flow=None)
 
         # make sure all of our actions have uuids set
@@ -765,8 +771,8 @@
 
     def test_migrate_to_10(self):
         # this is really just testing our rewriting of webhook rulesets
-        flow = self.get_flow("dual_webhook")
-        flow_def = self.get_flow_json("dual_webhook")
+        flow = self.load_flow("dual_webhook")
+        flow_def = self.load_flow_def("dual_webhook")
 
         # get our definition out
         migrated = migrate_to_version_10(flow_def, flow=flow)
@@ -793,7 +799,7 @@ def test_migrate_to_9(self):
             label_id=label.pk,
         )
 
-        exported_json = self.get_import_json("migrate_to_9", substitutions)
+        exported_json = self.load_json("test_flows/legacy/migrations/migrate_to_9.json", substitutions)
         exported_json = migrate_export_to_version_9(exported_json, self.org, True)
 
         # our campaign events shouldn't have ids
@@ -855,23 +861,23 @@
         self.assertNotIn("id", flow_json["metadata"])
 
         # import the same thing again, should have the same uuids
-        new_exported_json = self.get_import_json("migrate_to_9", substitutions)
+        new_exported_json = self.load_json("test_flows/legacy/migrations/migrate_to_9.json", substitutions)
         new_exported_json = migrate_export_to_version_9(new_exported_json, self.org, True)
         self.assertEqual(flow_json["metadata"]["uuid"], new_exported_json["flows"][0]["metadata"]["uuid"])
 
         # but when done as a different site, it should be unique
-        new_exported_json = self.get_import_json("migrate_to_9", substitutions)
+        new_exported_json = self.load_json("test_flows/legacy/migrations/migrate_to_9.json", substitutions)
         new_exported_json = migrate_export_to_version_9(new_exported_json, self.org, False)
         self.assertNotEqual(flow_json["metadata"]["uuid"], new_exported_json["flows"][0]["metadata"]["uuid"])
 
         # can also just import a single flow
-        exported_json = self.get_import_json("migrate_to_9", substitutions)
+        exported_json = self.load_json("test_flows/legacy/migrations/migrate_to_9.json", substitutions)
         flow_json = migrate_to_version_9(exported_json["flows"][0], start_flow)
         self.assertIn("uuid", flow_json["metadata"])
         self.assertNotIn("id", flow_json["metadata"])
 
         # try it with missing metadata
-        flow_json = self.get_import_json("migrate_to_9", substitutions)["flows"][0]
+        flow_json = self.load_json("test_flows/legacy/migrations/migrate_to_9.json", substitutions)["flows"][0]
         del flow_json["metadata"]
         flow_json = migrate_to_version_9(flow_json, start_flow)
         self.assertEqual(1, flow_json["metadata"]["revision"])
@@ -885,7 +891,7 @@
 
     def test_migrate_to_8(self):
         # file uses old style expressions
-        flow_json = self.get_flow_json("old_expressions")
+        flow_json = self.load_flow_def("old_expressions")
 
         # migrate to the version right before us first
         flow_json = migrate_to_version_7(flow_json)
@@ -904,7 +910,7 @@
         self.assertEqual(flow_json["rule_sets"][1]["operand"], "@(step.value + 3)")
 
     def test_migrate_to_7(self):
-        flow_json = self.get_flow_json("ivr_v3")
+        flow_json = self.load_flow_def("ivr_v3")
 
         # migrate to the version right before us first
         flow_json = migrate_to_version_5(flow_json)
@@ -926,7 +932,7 @@
 
     def test_migrate_to_6(self):
         # file format is old non-localized format
-        voice_json = self.get_flow_json("ivr_v3")
+        voice_json = self.load_flow_def("ivr_v3")
         definition = voice_json.get("definition")
 
         # no language set
@@ -948,7 +954,7 @@
         self.assertEqual("/recording.mp3", definition["action_sets"][0]["actions"][0]["recording"]["base"])
 
         # now try one that doesn't have a recording set
-        voice_json = self.get_flow_json("ivr_v3")
+        voice_json = self.load_flow_def("ivr_v3")
         definition = voice_json.get("definition")
         del definition["action_sets"][0]["actions"][0]["recording"]
         voice_json = migrate_to_version_5(voice_json)
@@ -957,7 +963,7 @@
         self.assertNotIn("recording", definition["action_sets"][0]["actions"][0])
 
     def test_migrate_to_5_language(self):
-        flow_json = self.get_flow_json("multi_language_flow")
+        flow_json = self.load_flow_def("multi_language_flow")
         ruleset = flow_json["definition"]["rule_sets"][0]
         ruleset["operand"] = "@step.value|lower_case"
@@ -980,7 +986,7 @@
         self.assertEqual("Otro", rules[0]["category"]["spa"])
 
     def test_migrate_to_5(self):
-        flow = self.get_flow_json("favorites_v4")
+        flow = self.load_flow_def("favorites_v4")
         migrated = migrate_to_version_5(flow)["definition"]
 
         # first node should be a wait node
@@ -1034,7 +1040,7 @@ def test_migrate_sample_flows(self):
         email_node = order_checker.get_definition()["nodes"][10]
         email_action = email_node["actions"][1]
 
-        self.assertEqual(["admin@nyaruka.com"], email_action["addresses"])
+        self.assertEqual(["admin@textit.com"], email_action["addresses"])
 
     def test_migrate_bad_group_names(self):
         # This test makes sure that bad contact groups (< 25, etc) are migrated forward properly.
@@ -1042,7 +1048,7 @@
         # We have a single flow with a bad group name in the archive
         # at the time of this fix
         for v in ("4", "5", "6", "7", "8", "9", "10"):
             error = 'Failure migrating group names "%s" forward from v%s'
-            flow = self.get_flow("favorites_bad_group_name_v%s" % v)
+            flow = self.load_flow("favorites_bad_group_name_v%s" % v)
             self.assertIsNotNone(flow, "Failure importing favorites from v%s" % v)
             self.assertTrue(ContactGroup.objects.filter(name="Contacts < 25").exists(), error % ("< 25", v))
             self.assertTrue(ContactGroup.objects.filter(name="Contacts > 100").exists(), error % ("> 100", v))
@@ -1052,7 +1058,7 @@ def test_migrate_bad_group_names(self):
             flow.release(self.admin)
 
     def test_migrate_malformed_groups(self):
-        flow = self.get_flow("malformed_groups")
+        flow = self.load_flow("malformed_groups")
 
         self.assertIsNotNone(flow)
         self.assertTrue(ContactGroup.objects.filter(name="Contacts < 25").exists())
         self.assertTrue(ContactGroup.objects.filter(name="Unknown").exists())
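A note on the two helpers this test class gains: load_flow actually imports the legacy definition and returns the resulting Flow record, while load_flow_def only returns the first flow dict from the JSON export without touching the database. A short illustrative usage (hypothetical test method; the assertion is an example, not from the suite):

    def test_migrate_example(self):
        flow = self.load_flow("favorites")          # imports, returns a Flow instance
        flow_def = self.load_flow_def("favorites")  # raw definition dict, nothing written

        migrated = migrate_to_version_11_12(flow_def, flow)
        self.assertIn("action_sets", migrated)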
diff --git a/temba/flows/management/commands/fix_flows.py b/temba/flows/management/commands/fix_flows.py
deleted file mode 100644
index ecba792f092..00000000000
--- a/temba/flows/management/commands/fix_flows.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from copy import deepcopy
-from difflib import unified_diff
-
-from django.core.management.base import BaseCommand, CommandError
-
-from temba.orgs.models import Org
-from temba.utils import json
-
-
-def remove_invalid_translations(definition: dict):
-    """
-    Removes translations of things that users shouldn't be able to localize and can't from the editor
-    """
-    localization = definition.get("localization", {})
-    ui_nodes = definition.get("_ui", {}).get("nodes", {})
-
-    def remove_from_localization(item_uuid: str, key: str):
-        for lang, trans in localization.items():
-            item_trans = trans.get(item_uuid)
-            if item_trans and key in item_trans:
-                del item_trans[key]
-
-    for node in definition.get("nodes", []):
-        ui_node_type = ui_nodes.get(node["uuid"], {}).get("type")
-        if ui_node_type in ("split_by_webhook", "split_by_subflow"):
-            for category in node["router"]["categories"]:
-                remove_from_localization(category["uuid"], "name")
-            for caze in node["router"]["cases"]:
-                remove_from_localization(caze["uuid"], "arguments")
-
-
-fixers = [
-    remove_invalid_translations,
-]
-
-
-class Command(BaseCommand):
-    help = "Fixes problems in flows"
-
-    def add_arguments(self, parser):
-        parser.add_argument(type=int, action="store", dest="org_id", help="ID of org to fix flows for")
-        parser.add_argument("--preview", action="store_true", dest="preview", help="Just preview changes")
-
-    def handle(self, org_id: int, preview: bool, *args, **options):
-        org = Org.objects.filter(id=org_id).first()
-        if not org:
-            raise CommandError(f"no such org with id {org_id}")
-
-        self.stdout.write(f"Fixing flows for org '{org.name}'...")
-
-        num_fixed = 0
-        for flow in org.flows.filter(is_active=True):
-            if self.fix_flow(flow, preview):
-                num_fixed += 1
-
-        self.stdout.write(f" > fixed {num_fixed} flows")
-
-    def fix_flow(self, flow, preview: bool) -> bool:
-        original = flow.get_definition()
-        definition = deepcopy(original)
-
-        for fixer in fixers:
-            fixer(definition)
-
-        old_lines = json.dumps(original, indent=2).splitlines(keepends=True)
-        new_lines = json.dumps(definition, indent=2).splitlines(keepends=True)
-        diff_lines = list(unified_diff(old_lines, new_lines, fromfile="original", tofile="fixed"))
-
-        if diff_lines:
-            for line in diff_lines:
-                self.stdout.write(line, ending="")
-
-            if not preview:
-                new_rev, issues = flow.save_revision(None, definition)
-                self.stdout.write(f" > new revision ({new_rev.revision}) saved for flow '{flow.name}'")
-            return True
-        else:
-            return False
diff --git a/temba/flows/management/commands/migrate_flows.py b/temba/flows/management/commands/migrate_flows.py
index 3eabc10e927..24692b43a0c 100644
--- a/temba/flows/management/commands/migrate_flows.py
+++ b/temba/flows/management/commands/migrate_flows.py
@@ -1,9 +1,9 @@
+import itertools
 import traceback
 
 from django.core.management.base import BaseCommand
 
 from temba.flows.models import Flow
-from temba.utils import chunk_list
 
 
 class Command(BaseCommand):
@@ -30,7 +30,7 @@ def migrate_flows(self):
         num_updated = 0
         num_errored = 0
 
-        for id_batch in chunk_list(flow_ids, 1000):
+        for id_batch in itertools.batched(flow_ids, 1000):
             for flow in Flow.objects.filter(id__in=id_batch):
                 try:
                     flow.ensure_current_version()
diff --git a/temba/flows/management/commands/recalc_node_counts.py b/temba/flows/management/commands/recalc_node_counts.py
deleted file mode 100644
index 52695749159..00000000000
--- a/temba/flows/management/commands/recalc_node_counts.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from collections import defaultdict
-
-from django.core.management.base import BaseCommand
-from django.db import transaction
-
-from temba.flows.models import Flow, FlowNodeCount, FlowRun
-
-
-def recalc_node_counts(flow):
-    node_counts = defaultdict(int)
-
-    all_runs = (
-        flow.runs.filter(status__in=(FlowRun.STATUS_ACTIVE, FlowRun.STATUS_WAITING))
-        .exclude(current_node_uuid=None)
-        .only("id", "current_node_uuid")
-        .order_by("id")
-    )
-    max_id = 0
-
-    while True:
-        batch = list(all_runs.filter(id__gt=max_id)[:1000])
-        if not batch:
-            break
-        max_id = batch[-1].id
-
-        for run in batch:
-            node_counts[run.current_node_uuid] += 1
-
-    records = []
-    for node_uuid, count in node_counts.items():
-        records.append(FlowNodeCount(flow=flow, node_uuid=node_uuid, count=count))
-
-    with transaction.atomic():
-        FlowNodeCount.objects.filter(flow=flow).delete()
-        FlowNodeCount.objects.bulk_create(records)
-
-
-class Command(BaseCommand):
-    help = "Re-calculates node counts for a flow"
-
-    def add_arguments(self, parser):
-        parser.add_argument("--flow", type=int, action="store", dest="flow_id", help="ID of flow to fix")
-
-    def handle(self, flow_id, *args, **options):
-        flow = Flow.objects.get(id=flow_id)
-
-        print(f"Re-calculating flow node counts for '{flow.name}' (#{flow.id})...")
-
-        recalc_node_counts(flow)
diff --git a/temba/flows/management/commands/tests.py b/temba/flows/management/commands/tests.py
index 8eeb7348e96..c455a376e4e 100644
--- a/temba/flows/management/commands/tests.py
+++ b/temba/flows/management/commands/tests.py
@@ -2,7 +2,7 @@
 from django.utils import timezone
 
 from temba.contacts.models import Contact
-from temba.flows.models import FlowNodeCount, FlowStart
+from temba.flows.models import FlowStart
 from temba.tests import TembaTest
 from temba.tests.engine import MockSessionWriter
@@ -73,88 +73,6 @@ def test_command(self):
         self.assertFalse(flow3.has_issues)
 
 
-class RecalcNodeCountsTest(TembaTest):
-    def test_recalc_node_counts(self):
-        contact1 = self.create_contact("Ben Haggerty", phone="+12065552020")
-        contact2 = self.create_contact("Joe", phone="+12065550002")
-        contact3 = self.create_contact("Frank", phone="+12065550003")
-
-        def check_node_count_rebuild(flow, assert_count):
-            node_counts = FlowNodeCount.get_totals(flow)
-
-            call_command("recalc_node_counts", flow_id=flow.id)
-
-            new_counts = FlowNodeCount.get_totals(flow)
-            self.assertEqual(new_counts, node_counts)
-            self.assertEqual(assert_count, sum(new_counts.values()))
-
-        flow = self.get_flow("favorites_v13")
-        nodes = flow.get_definition()["nodes"]
-
-        color_prompt = nodes[0]
-        color_other = nodes[1]
-        color_split = nodes[2]
-        beer_prompt = nodes[3]
-        beer_split = nodes[5]
-        name_prompt = nodes[6]
-        name_split = nodes[7]
-        name_reply = nodes[8]
-
-        session1 = MockSessionWriter(contact1, flow).visit(color_prompt).visit(color_split).wait().save()
-        session2 = MockSessionWriter(contact2, flow).visit(color_prompt).visit(color_split).wait().save()
-        session3 = MockSessionWriter(contact3, flow).visit(color_prompt).visit(color_split).wait().save()
-
-        # recalculate node counts and check they are the same
-        check_node_count_rebuild(flow, 3)
-
-        (session1.resume(self.create_incoming_msg(contact1, "Blue")).visit(beer_prompt).visit(beer_split).wait().save())
-        (
-            session2.resume(self.create_incoming_msg(contact2, "Beige"))
-            .visit(color_other)
-            .visit(color_split)
-            .wait()
-            .save()
-        )
-        (
-            session3.resume(self.create_incoming_msg(contact3, "Amber"))
-            .visit(color_other)
-            .visit(color_split)
-            .wait()
-            .save()
-        )
-
-        check_node_count_rebuild(flow, 3)
-
-        (
-            session1.resume(self.create_incoming_msg(contact1, "Primus"))
-            .visit(name_prompt)
-            .visit(name_split)
-            .wait()
-            .save()
-        )
-        (
-            session2.resume(self.create_incoming_msg(contact2, "Orange"))
-            .visit(color_other)
-            .visit(color_split)
-            .wait()
-            .save()
-        )
-        (
-            session3.resume(self.create_incoming_msg(contact3, "Amber"))
-            .visit(color_other)
-            .visit(color_split)
-            .wait()
-            .save()
-        )
-
-        check_node_count_rebuild(flow, 3)
-
-        # contact1 replies with name to complete the flow
-        (session1.resume(self.create_incoming_msg(contact1, "Bob")).visit(name_reply).complete().save())
-
-        check_node_count_rebuild(flow, 2)
-
-
 class UndoFootgunTest(TembaTest):
     def test_group_changes(self):
         flow = self.create_flow("Test")
diff --git a/temba/flows/management/commands/undo_footgun.py b/temba/flows/management/commands/undo_footgun.py
index f6f9a15d18a..28280477829 100644
--- a/temba/flows/management/commands/undo_footgun.py
+++ b/temba/flows/management/commands/undo_footgun.py
@@ -1,3 +1,4 @@
+import itertools
 from collections import defaultdict
 
 from django.core.management.base import BaseCommand, CommandError
@@ -5,7 +6,6 @@
 from temba.contacts.models import Contact, ContactGroup
 from temba.flows.models import FlowRun, FlowSession, FlowStart
-from temba.utils import chunk_list
 
 
 class Command(BaseCommand):
@@ -43,7 +43,7 @@ def handle(self, start_id: int, event_types: list, dry_run: bool, quiet: bool, *
         num_fixed = 0
 
         # process runs in batches
-        for run_id_batch in chunk_list(run_ids, self.batch_size):
+        for run_id_batch in itertools.batched(run_ids, self.batch_size):
             run_batch = list(FlowRun.objects.filter(id__in=run_id_batch).only("id", "contact_id", "session_id"))
 
             self.undo_for_batch(run_batch, undoers, dry_run)
diff --git a/temba/flows/migrations/0335_flowstart_modified_by_alter_flowstart_created_by_and_more.py b/temba/flows/migrations/0335_flowstart_modified_by_alter_flowstart_created_by_and_more.py
new file mode 100644
index 00000000000..5bf695d2f2a
--- /dev/null
+++ b/temba/flows/migrations/0335_flowstart_modified_by_alter_flowstart_created_by_and_more.py
@@ -0,0 +1,39 @@
+# Generated by Django 5.1 on 2024-09-17 19:43
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("flows", "0334_remove_flowrun_submitted_by"),
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="flowstart",
+            name="modified_by",
+            field=models.ForeignKey(
+                null=True, on_delete=django.db.models.deletion.PROTECT, related_name="+", to=settings.AUTH_USER_MODEL
+            ),
+        ),
+        migrations.AlterField(
+            model_name="flowstart",
+            name="created_by",
+            field=models.ForeignKey(
+                null=True, on_delete=django.db.models.deletion.PROTECT, related_name="+", to=settings.AUTH_USER_MODEL
+            ),
+        ),
+        migrations.AlterField(
+            model_name="flowstart",
+            name="status",
+            field=models.CharField(
+                choices=[("P", "Pending"), ("S", "Starting"), ("C", "Complete"), ("F", "Failed"), ("I", "Interrupted")],
+                default="P",
+                max_length=1,
+            ),
+        ),
+    ]
diff --git a/temba/flows/migrations/0336_alter_flowstart_status.py b/temba/flows/migrations/0336_alter_flowstart_status.py
new file mode 100644
index 00000000000..229000cf056
--- /dev/null
+++ b/temba/flows/migrations/0336_alter_flowstart_status.py
@@ -0,0 +1,22 @@
+# Generated by Django 5.1 on 2024-09-23 22:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("flows", "0335_flowstart_modified_by_alter_flowstart_created_by_and_more"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="flowstart",
+            name="status",
+            field=models.CharField(
+                choices=[("P", "Pending"), ("S", "Started"), ("C", "Completed"), ("F", "Failed"), ("I", "Interrupted")],
+                default="P",
+                max_length=1,
+            ),
+        ),
+    ]
diff --git a/temba/flows/migrations/0337_alter_flowstart_status.py b/temba/flows/migrations/0337_alter_flowstart_status.py
new file mode 100644
index 00000000000..10f87d0d33d
--- /dev/null
+++ b/temba/flows/migrations/0337_alter_flowstart_status.py
@@ -0,0 +1,29 @@
+# Generated by Django 5.1 on 2024-09-25 16:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("flows", "0336_alter_flowstart_status"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="flowstart",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    ("P", "Pending"),
+                    ("Q", "Queued"),
+                    ("S", "Started"),
+                    ("C", "Completed"),
+                    ("F", "Failed"),
+                    ("I", "Interrupted"),
+                ],
+                default="P",
+                max_length=1,
+            ),
+        ),
+    ]
diff --git a/temba/flows/migrations/0338_flowactivitycount.py b/temba/flows/migrations/0338_flowactivitycount.py
new file mode 100644
index 00000000000..73d45fc653c
--- /dev/null
+++ b/temba/flows/migrations/0338_flowactivitycount.py
@@ -0,0 +1,47 @@
+# Generated by Django 5.1.2 on 2024-11-20 21:35
+
+import django.contrib.postgres.indexes
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("flows", "0337_alter_flowstart_status"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="FlowActivityCount",
+            fields=[
+                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
+                ("is_squashed", models.BooleanField(default=False)),
+                ("scope", models.CharField(max_length=128)),
+                ("count", models.IntegerField(default=0)),
+                (
+                    "flow",
+                    models.ForeignKey(
+                        db_index=False,
+                        on_delete=django.db.models.deletion.PROTECT,
+                        related_name="counts",
+                        to="flows.flow",
+                    ),
+                ),
+            ],
+            options={
+                "indexes": [
+                    models.Index(
+                        models.F("flow"),
+                        django.contrib.postgres.indexes.OpClass("scope", name="varchar_pattern_ops"),
+                        name="flowactivitycount_flow_scope",
+                    ),
+                    models.Index(
+                        condition=models.Q(("is_squashed", False)),
+                        fields=["flow", "scope"],
+                        name="flowactivitycount_unsquashed",
+                    ),
+                ],
+            },
+        ),
+    ]
diff --git a/temba/flows/migrations/0339_update_triggers.py b/temba/flows/migrations/0339_update_triggers.py
new file mode 100644
index 00000000000..16072d9b881
--- /dev/null
+++ b/temba/flows/migrations/0339_update_triggers.py
@@ -0,0 +1,32 @@
+# Generated by Django 5.1.2 on 2024-11-21 17:54
+
+from django.db import migrations
+
+SQL = """
+----------------------------------------------------------------------
+-- Handles deletion of flow runs
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_delete() RETURNS TRIGGER AS $$
+DECLARE
+    p INT;
+    _path_json JSONB;
+    _path_len INT;
+BEGIN
+    -- if this is a user delete then remove from results
+    IF OLD.delete_from_results THEN
+        PERFORM temba_update_category_counts(OLD.flow_id, NULL, OLD.results::json);
+    END IF;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;"""
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("flows", "0338_flowactivitycount"),
+        ("sql", "0006_squashed"),
+    ]
+
+    operations = [migrations.RunSQL(SQL)]
diff --git a/temba/flows/migrations/0340_update_triggers.py b/temba/flows/migrations/0340_update_triggers.py
new file mode 100644
index 00000000000..f849cb1e9f8
--- /dev/null
+++ b/temba/flows/migrations/0340_update_triggers.py
@@ -0,0 +1,64 @@
+# Generated by Django 5.1.2 on 2024-11-21 20:45
+
+from django.db import migrations
+
+SQL = """
+----------------------------------------------------------------------
+-- Handles UPDATE statements on msg table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_msg_on_update() RETURNS TRIGGER AS $$
+BEGIN
+    -- add negative counts for all old non-null system labels that don't match the new ones
+    INSERT INTO msgs_systemlabelcount("org_id", "label_type", "count", "is_squashed")
+    SELECT o.org_id, temba_msg_determine_system_label(o), -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    WHERE temba_msg_determine_system_label(o) IS DISTINCT FROM temba_msg_determine_system_label(n) AND temba_msg_determine_system_label(o) IS NOT NULL
+    GROUP BY 1, 2;
+
+    -- add counts for all new system labels that don't match the old ones
+    INSERT INTO msgs_systemlabelcount("org_id", "label_type", "count", "is_squashed")
+    SELECT n.org_id, temba_msg_determine_system_label(n), count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    WHERE temba_msg_determine_system_label(o) IS DISTINCT FROM temba_msg_determine_system_label(n) AND temba_msg_determine_system_label(n) IS NOT NULL
+    GROUP BY 1, 2;
+
+    -- add negative old-state label counts for all messages being archived/restored
+    INSERT INTO msgs_labelcount("label_id", "is_archived", "count", "is_squashed")
+    SELECT ml.label_id, o.visibility != 'V', -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    INNER JOIN msgs_msg_labels ml ON ml.msg_id = o.id
+    WHERE (o.visibility = 'V' AND n.visibility != 'V') or (o.visibility != 'V' AND n.visibility = 'V')
+    GROUP BY 1, 2;
+
+    -- add new-state label counts for all messages being archived/restored
+    INSERT INTO msgs_labelcount("label_id", "is_archived", "count", "is_squashed")
+    SELECT ml.label_id, n.visibility != 'V', count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    INNER JOIN msgs_msg_labels ml ON ml.msg_id = n.id
+    WHERE (o.visibility = 'V' AND n.visibility != 'V') or (o.visibility != 'V' AND n.visibility = 'V')
+    GROUP BY 1, 2;
+
+    -- add new flow activity counts for incoming messages now marked as handled by a flow
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT s.flow_id, unnest(ARRAY[
+        format('msgsin:hour:%s', extract(hour FROM NOW())),
+        format('msgsin:dow:%s', extract(isodow FROM NOW())),
+        format('msgsin:date:%s', NOW()::date)
+    ]), s.msgs, FALSE
+    FROM (
+        SELECT n.flow_id, count(*) AS msgs FROM newtab n INNER JOIN oldtab o ON o.id = n.id
+        WHERE n.direction = 'I' AND o.flow_id IS NULL AND n.flow_id IS NOT NULL
+        GROUP BY 1
+    ) s;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+"""
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [("flows", "0339_update_triggers")]
+
+    operations = [migrations.RunSQL(SQL)]
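The msgsin:* rows written by the trigger above encode hour, ISO day-of-week and date into the single scope column of FlowActivityCount. Reading them back is then just a prefix scan plus a sum; a minimal sketch of that aggregation (hypothetical helper, not code from this diff):

    from collections import defaultdict

    def totals_by_scope_prefix(rows: list[tuple[str, int]], prefix: str) -> dict[str, int]:
        # rows are (scope, count) pairs, e.g. ("msgsin:hour:9", 5); counts can be
        # negative deltas until they are squashed, so they must be summed
        totals: dict[str, int] = defaultdict(int)
        for scope, count in rows:
            if scope.startswith(prefix):
                totals[scope[len(prefix):]] += count
        return dict(totals)

    rows = [("msgsin:hour:9", 5), ("msgsin:hour:9", -1), ("msgsin:dow:1", 3)]
    assert totals_by_scope_prefix(rows, "msgsin:hour:") == {"9": 4}

This prefix-read pattern is presumably why the model migration earlier indexes (flow, scope) with varchar_pattern_ops.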
diff --git a/temba/flows/migrations/0341_backfill_engagement_counts.py b/temba/flows/migrations/0341_backfill_engagement_counts.py
new file mode 100644
index 00000000000..cf33ebdb09b
--- /dev/null
+++ b/temba/flows/migrations/0341_backfill_engagement_counts.py
@@ -0,0 +1,71 @@
+# Generated by Django 5.1.2 on 2024-11-22 15:12
+
+import itertools
+
+from django.db import migrations, transaction
+from django.db.models import Sum
+
+
+def backfill_engagement_counts(apps, schema_editor):  # pragma: no cover
+    Flow = apps.get_model("flows", "Flow")
+
+    flow_ids = list(Flow.objects.filter(is_active=True).order_by("id").values_list("id", flat=True))
+
+    print(f"Updating engagement counts for {len(flow_ids)} flows...")
+
+    num_backfilled = 0
+
+    for id_batch in itertools.batched(flow_ids, 500):
+        flows = Flow.objects.filter(id__in=id_batch).only("id", "metadata").order_by("id")
+        for flow in flows:
+            backfill_for_flow(apps, flow)
+
+        num_backfilled += len(flows)
+        print(f"> updated counts for {num_backfilled} of {len(flow_ids)} flows")
+
+
+def backfill_for_flow(apps, flow) -> int:  # pragma: no cover
+    FlowActivityCount = apps.get_model("flows", "FlowActivityCount")
+
+    # no waits then no engagement counts
+    exit_uuids = flow.metadata.get("waiting_exit_uuids", [])
+    if not exit_uuids:
+        return
+
+    exit_counts = flow.path_counts.filter(from_uuid__in=exit_uuids)
+
+    with transaction.atomic():
+        to_create = []
+
+        def add_count(scope: str, count: int):
+            if count > 0:
+                to_create.append(FlowActivityCount(flow=flow, scope=scope, count=count, is_squashed=True))
+
+        by_dow = exit_counts.extra({"dow": "extract(isodow FROM period)"}).values("dow").annotate(total=Sum("count"))
+        for count in by_dow:
+            add_count(f"msgsin:dow:{count['dow']}", count["total"])
+
+        by_hour = exit_counts.extra({"hour": "extract(hour FROM period)"}).values("hour").annotate(total=Sum("count"))
+        for count in by_hour:
+            add_count(f"msgsin:hour:{count['hour']}", count["total"])
+
+        by_date = exit_counts.extra({"date": "period::date"}).values("date").annotate(total=Sum("count"))
+        for count in by_date:
+            add_count(f"msgsin:date:{count['date'].isoformat()}", count["total"])
+
+        flow.counts.filter(scope__startswith="msgsin:").delete()
+        FlowActivityCount.objects.bulk_create(to_create)
+
+    return len(to_create)
+
+
+def apply_manual():  # pragma: no cover
+    from django.apps import apps
+
+    backfill_engagement_counts(apps, None)
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [("flows", "0340_update_triggers")]
+
+    operations = [migrations.RunPython(backfill_engagement_counts, migrations.RunPython.noop)]
diff --git a/temba/flows/migrations/0342_update_triggers.py b/temba/flows/migrations/0342_update_triggers.py
new file mode 100644
index 00000000000..1be61148c07
--- /dev/null
+++ b/temba/flows/migrations/0342_update_triggers.py
@@ -0,0 +1,129 @@
+# Generated by Django 5.1.2 on 2024-11-26 21:50
+
+from django.db import migrations
+
+SQL = """
+----------------------------------------------------------------------
+-- Handles INSERT statements on flowrun table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_on_insert() RETURNS TRIGGER AS $$
+BEGIN
+    -- add status counts for all new status values
+    INSERT INTO flows_flowrunstatuscount("flow_id", "status", "count", "is_squashed")
+    SELECT flow_id, status, count(*), FALSE FROM newtab GROUP BY flow_id, status;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('status:%s', status), count(*), FALSE FROM newtab GROUP BY 1, 2;
+
+    -- add start counts for all new start values
+    INSERT INTO flows_flowstartcount("start_id", "count", "is_squashed")
+    SELECT start_id, count(*), FALSE FROM newtab WHERE start_id IS NOT NULL GROUP BY start_id;
+
+    -- add node counts for all new current node values
+    INSERT INTO flows_flownodecount("flow_id", "node_uuid", "count", "is_squashed")
+    SELECT flow_id, current_node_uuid, count(*), FALSE FROM newtab
+    WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY flow_id, current_node_uuid;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('node:%s', current_node_uuid), count(*), FALSE FROM newtab
+    WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY 1, 2;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+
+----------------------------------------------------------------------
+-- Handles DELETE statements on flowrun table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_on_delete() RETURNS TRIGGER AS $$
+BEGIN
+    -- add negative status counts for all rows being deleted manually
+    INSERT INTO flows_flowrunstatuscount("flow_id", "status", "count", "is_squashed")
+    SELECT flow_id, status, -count(*), FALSE FROM oldtab
+    WHERE delete_from_results = TRUE GROUP BY flow_id, status;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('status:%s', status), -count(*), FALSE FROM oldtab
+    WHERE delete_from_results = TRUE GROUP BY 1, 2;
+
+    -- add negative node counts for any runs sitting at a node
+    INSERT INTO flows_flownodecount("flow_id", "node_uuid", "count", "is_squashed")
+    SELECT flow_id, current_node_uuid, -count(*), FALSE FROM oldtab
+    WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY flow_id, current_node_uuid;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('node:%s', current_node_uuid), -count(*), FALSE FROM oldtab
+    WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY 1, 2;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+
+----------------------------------------------------------------------
+-- Handles UPDATE statements on flowrun table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_on_update() RETURNS TRIGGER AS $$
+BEGIN
+    -- add negative status counts for all old status values that don't match the new ones
+    INSERT INTO flows_flowrunstatuscount("flow_id", "status", "count", "is_squashed")
+    SELECT o.flow_id, o.status, -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    WHERE o.status != n.status
+    GROUP BY o.flow_id, o.status;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT o.flow_id, format('status:%s', o.status), -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    WHERE o.status != n.status
+    GROUP BY 1, 2;
+
+    -- add status counts for all new status values that don't match the old ones
+    INSERT INTO flows_flowrunstatuscount("flow_id", "status", "count", "is_squashed")
+    SELECT n.flow_id, n.status, count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    WHERE o.status != n.status
+    GROUP BY n.flow_id, n.status;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT n.flow_id, format('status:%s', n.status), count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    WHERE o.status != n.status
+    GROUP BY 1, 2;
+
+    -- add negative node counts for all old current node values that don't match the new ones
+    INSERT INTO flows_flownodecount("flow_id", "node_uuid", "count", "is_squashed")
+    SELECT o.flow_id, o.current_node_uuid, -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    WHERE o.current_node_uuid IS NOT NULL AND o.status IN ('A', 'W') AND (o.current_node_uuid != n.current_node_uuid OR n.status NOT IN ('A', 'W'))
+    GROUP BY o.flow_id, o.current_node_uuid;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT o.flow_id, format('node:%s', o.current_node_uuid), -count(*), FALSE FROM oldtab o
+    INNER JOIN newtab n ON n.id = o.id
+    WHERE o.current_node_uuid IS NOT NULL AND o.status IN ('A', 'W') AND (o.current_node_uuid != n.current_node_uuid OR n.status NOT IN ('A', 'W'))
+    GROUP BY 1, 2;
+
+    -- add node counts for all new current node values that don't match the old ones
+    INSERT INTO flows_flownodecount("flow_id", "node_uuid", "count", "is_squashed")
+    SELECT n.flow_id, n.current_node_uuid, count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    WHERE n.current_node_uuid IS NOT NULL AND o.current_node_uuid != n.current_node_uuid AND n.status IN ('A', 'W')
+    GROUP BY n.flow_id, n.current_node_uuid;
+
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT n.flow_id, format('node:%s', n.current_node_uuid), count(*), FALSE FROM newtab n
+    INNER JOIN oldtab o ON o.id = n.id
+    WHERE n.current_node_uuid IS NOT NULL AND o.current_node_uuid != n.current_node_uuid AND n.status IN ('A', 'W')
+    GROUP BY 1, 2;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+"""
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [("flows", "0341_backfill_engagement_counts")]
+
+    operations = [migrations.RunSQL(SQL)]
diff --git a/temba/flows/migrations/0343_backfill_new_counts.py b/temba/flows/migrations/0343_backfill_new_counts.py
new file mode 100644
index 00000000000..7568aeeb23b
--- /dev/null
+++ b/temba/flows/migrations/0343_backfill_new_counts.py
@@ -0,0 +1,60 @@
+# Generated by Django 5.1.2 on 2024-11-27 14:42
+
+import itertools
+
+from django.db import migrations, transaction
+from django.db.models import Q, Sum
+
+
+def backfill_new_counts(apps, schema_editor):  # pragma: no cover
+    Flow = apps.get_model("flows", "Flow")
+
+    flow_ids = list(Flow.objects.filter(is_active=True).order_by("id").values_list("id", flat=True))
+
+    print(f"Updating node and status counts for {len(flow_ids)} flows...")
+
+    num_backfilled = 0
+
+    for id_batch in itertools.batched(flow_ids, 500):
+        flows = Flow.objects.filter(id__in=id_batch).only("id").order_by("id")
+        for flow in flows:
+            backfill_for_flow(apps, flow)
+
+        num_backfilled += len(flows)
+        print(f"> updated counts for {num_backfilled} of {len(flow_ids)} flows")
+
+
+def backfill_for_flow(apps, flow) -> int:  # pragma: no cover
+    FlowActivityCount = apps.get_model("flows", "FlowActivityCount")
+
+    with transaction.atomic():
+        to_create = []
+
+        def add_count(scope: str, count: int):
+            if count > 0:
+                to_create.append(FlowActivityCount(flow=flow, scope=scope, count=count, is_squashed=True))
+
+        by_node = flow.node_counts.values("node_uuid").annotate(total=Sum("count"))
+        for count in by_node:
+            add_count(f"node:{count['node_uuid']}", count["total"])
+
+        by_status = flow.status_counts.values("status").annotate(total=Sum("count"))
+        for count in by_status:
+            add_count(f"status:{count['status']}", count["total"])
+
+        flow.counts.filter(Q(scope__startswith="status:") | Q(scope__startswith="node:")).delete()
+        FlowActivityCount.objects.bulk_create(to_create)
+
+    return len(to_create)
+
+
+def apply_manual():  # pragma: no cover
+    from django.apps import apps
+
+    backfill_new_counts(apps, None)
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [("flows", "0342_update_triggers")]
+
+    operations = [migrations.RunPython(backfill_new_counts, migrations.RunPython.noop)]
diff --git a/temba/flows/migrations/0344_update_triggers.py b/temba/flows/migrations/0344_update_triggers.py
new file mode 100644
index 00000000000..78d1f425da9
--- /dev/null
+++ b/temba/flows/migrations/0344_update_triggers.py
@@ -0,0 +1,90 @@
+# Generated by Django 5.1.2 on 2024-11-27 18:58
+
+from django.db import migrations
+
+SQL = """
+----------------------------------------------------------------------
+-- Handles INSERT statements on flowrun table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_on_insert() RETURNS TRIGGER AS $$
+BEGIN
+    -- add status counts for all new status values
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('status:%s', status), count(*), FALSE FROM newtab GROUP BY 1, 2;
+
+    -- add start counts for all new start values
+    INSERT INTO flows_flowstartcount("start_id", "count", "is_squashed")
+    SELECT start_id, count(*), FALSE FROM newtab WHERE start_id IS NOT NULL GROUP BY start_id;
+
+    -- add node counts for all new current node values
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id, format('node:%s', current_node_uuid), count(*), FALSE FROM newtab
+    WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY 1, 2;
+
+    RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+
+----------------------------------------------------------------------
+-- Handles DELETE statements on flowrun table
+----------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION temba_flowrun_on_delete() RETURNS TRIGGER AS $$
+BEGIN
+    -- add negative status counts for all rows being deleted manually
+    INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed")
+    SELECT flow_id,
format('status:%s', status), -count(*), FALSE FROM oldtab + WHERE delete_from_results = TRUE GROUP BY 1, 2; + + -- add negative node counts for any runs sitting at a node + INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed") + SELECT flow_id, format('node:%s', current_node_uuid), -count(*), FALSE FROM oldtab + WHERE status IN ('A', 'W') AND current_node_uuid IS NOT NULL GROUP BY 1, 2; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +---------------------------------------------------------------------- +-- Handles UPDATE statements on flowrun table +---------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION temba_flowrun_on_update() RETURNS TRIGGER AS $$ +BEGIN + -- add negative status counts for all old status values that don't match the new ones + INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed") + SELECT o.flow_id, format('status:%s', o.status), -count(*), FALSE FROM oldtab o + INNER JOIN newtab n ON n.id = o.id + WHERE o.status != n.status + GROUP BY 1, 2; + + -- add status counts for all new status values that don't match the old ones + INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed") + SELECT n.flow_id, format('status:%s', n.status), count(*), FALSE FROM newtab n + INNER JOIN oldtab o ON o.id = n.id + WHERE o.status != n.status + GROUP BY 1, 2; + + -- add negative node counts for all old current node values that don't match the new ones + INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed") + SELECT o.flow_id, format('node:%s', o.current_node_uuid), -count(*), FALSE FROM oldtab o + INNER JOIN newtab n ON n.id = o.id + WHERE o.current_node_uuid IS NOT NULL AND o.status IN ('A', 'W') AND (o.current_node_uuid != n.current_node_uuid OR n.status NOT IN ('A', 'W')) + GROUP BY 1, 2; + + -- add node counts for all new current node values that don't match the old ones + INSERT INTO flows_flowactivitycount("flow_id", "scope", "count", "is_squashed") + SELECT n.flow_id, format('node:%s', n.current_node_uuid), count(*), FALSE FROM newtab n + INNER JOIN oldtab o ON o.id = n.id + WHERE n.current_node_uuid IS NOT NULL AND o.current_node_uuid != n.current_node_uuid AND n.status IN ('A', 'W') + GROUP BY 1, 2; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; +""" + + +class Migration(migrations.Migration): + + dependencies = [("flows", "0343_backfill_new_counts")] + + operations = [migrations.RunSQL(SQL)] diff --git a/temba/flows/migrations/0345_backfill_segment_counts.py b/temba/flows/migrations/0345_backfill_segment_counts.py new file mode 100644 index 00000000000..4e7a360693f --- /dev/null +++ b/temba/flows/migrations/0345_backfill_segment_counts.py @@ -0,0 +1,56 @@ +# Generated by Django 5.1.2 on 2024-11-28 14:01 + +import itertools + +from django.db import migrations, transaction +from django.db.models import Sum + + +def backfill_segment_counts(apps, schema_editor): # pragma: no cover + Flow = apps.get_model("flows", "Flow") + + flow_ids = list(Flow.objects.filter(is_active=True).order_by("id").values_list("id", flat=True)) + + print(f"Backfilling segment counts for {len(flow_ids)} flows...") + + num_backfilled = 0 + + for id_batch in itertools.batched(flow_ids, 500): + flows = Flow.objects.filter(id__in=id_batch).only("id", "metadata").order_by("id") + for flow in flows: + backfill_for_flow(apps, flow) + + num_backfilled += len(flows) + print(f"> updated counts for {num_backfilled} of {len(flow_ids)} flows") + + +def backfill_for_flow(apps, flow) 
-> int: # pragma: no cover + FlowActivityCount = apps.get_model("flows", "FlowActivityCount") + + with transaction.atomic(): + to_create = [] + + def add_count(scope: str, count: int): + if count > 0: + to_create.append(FlowActivityCount(flow=flow, scope=scope, count=count, is_squashed=True)) + + by_segment = flow.path_counts.values("from_uuid", "to_uuid").annotate(total=Sum("count")) + for count in by_segment: + add_count(f"segment:{count['from_uuid']}:{count['to_uuid']}", count["total"]) + + flow.counts.filter(scope__startswith="segment:").delete() + FlowActivityCount.objects.bulk_create(to_create) + return len(to_create) + + +def apply_manual(): # pragma: no cover + from django.apps import apps + + backfill_segment_counts(apps, None) + + +class Migration(migrations.Migration): + + dependencies = [("flows", "0344_update_triggers")] + + operations = [migrations.RunPython(backfill_segment_counts, migrations.RunPython.noop)] diff --git a/temba/flows/migrations/0346_update_triggers.py b/temba/flows/migrations/0346_update_triggers.py new file mode 100644 index 00000000000..f29ed051ddb --- /dev/null +++ b/temba/flows/migrations/0346_update_triggers.py @@ -0,0 +1,30 @@ +# Generated by Django 5.1.2 on 2024-11-28 18:05 + +from django.db import migrations + +SQL = """ +---------------------------------------------------------------------- +-- Handles changes relating to a flow run's path +---------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION temba_flowrun_path_change() RETURNS TRIGGER AS $$ +BEGIN + -- we don't support rewinding run paths, so the new path must be longer than the old + IF jsonb_array_length(COALESCE(NEW.path, '[]')::jsonb) < jsonb_array_length(COALESCE(OLD.path, '[]')::jsonb) THEN + RAISE EXCEPTION 'Cannot rewind a flow run path'; + END IF; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER temba_flowrun_insert ON flows_flowrun; +DROP FUNCTION temba_flowrun_insert(); +DROP FUNCTION temba_insert_flowpathcount(INTEGER, UUID, UUID, TIMESTAMP WITH TIME ZONE, INTEGER); +""" + + +class Migration(migrations.Migration): + + dependencies = [("flows", "0345_backfill_segment_counts")] + + operations = [migrations.RunSQL(SQL)] diff --git a/temba/flows/migrations/0347_flowrun_path_nodes_flowrun_path_times.py b/temba/flows/migrations/0347_flowrun_path_nodes_flowrun_path_times.py new file mode 100644 index 00000000000..dff69c40b6c --- /dev/null +++ b/temba/flows/migrations/0347_flowrun_path_nodes_flowrun_path_times.py @@ -0,0 +1,24 @@ +# Generated by Django 5.1.2 on 2024-12-03 15:31 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("flows", "0346_update_triggers"), + ] + + operations = [ + migrations.AddField( + model_name="flowrun", + name="path_nodes", + field=django.contrib.postgres.fields.ArrayField(base_field=models.UUIDField(), null=True, size=None), + ), + migrations.AddField( + model_name="flowrun", + name="path_times", + field=django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), null=True, size=None), + ), + ] diff --git a/temba/flows/migrations/0348_update_triggers.py b/temba/flows/migrations/0348_update_triggers.py new file mode 100644 index 00000000000..6f0766d35a1 --- /dev/null +++ b/temba/flows/migrations/0348_update_triggers.py @@ -0,0 +1,39 @@ +# Generated by Django 5.1.2 on 2024-12-04 16:07 + +from django.db import migrations + +SQL = """ 
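+-- note: the path check below uses the Postgres array containment operator (@>), i.e. every node of the
+-- old path must still be present in the new path; this is element containment, not a strict prefix match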
+---------------------------------------------------------------------- +-- Handles changes to a flow run +---------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION temba_flowrun_on_change() RETURNS TRIGGER AS $$ +BEGIN + -- restrict status changes + IF OLD.status NOT IN ('A', 'W') AND NEW.status IN ('A', 'W') THEN RAISE EXCEPTION 'Cannot restart an exited flow run'; END IF; + + -- we don't support rewinding run paths so the new path must contain the old + IF NOT (COALESCE(NEW.path_nodes, '{}'::uuid[]) @> COALESCE(OLD.path_nodes, '{}'::uuid[])) THEN + RAISE EXCEPTION 'Cannot rewind a flow run path (old=%, new=%)', array_length(OLD.path_nodes, 1), array_length(NEW.path_nodes, 1); + END IF; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER temba_flowrun_on_change + AFTER UPDATE ON flows_flowrun + FOR EACH ROW EXECUTE PROCEDURE temba_flowrun_on_change(); + +DROP TRIGGER temba_flowrun_status_change ON flows_flowrun; +DROP TRIGGER temba_flowrun_path_change ON flows_flowrun; + +DROP FUNCTION temba_flowrun_status_change(); +DROP FUNCTION temba_flowrun_path_change(); +""" + + +class Migration(migrations.Migration): + + dependencies = [("flows", "0347_flowrun_path_nodes_flowrun_path_times")] + + operations = [migrations.RunSQL(SQL)] diff --git a/temba/flows/migrations/0349_alter_flowactivitycount_count_and_more.py b/temba/flows/migrations/0349_alter_flowactivitycount_count_and_more.py new file mode 100644 index 00000000000..90eacb26967 --- /dev/null +++ b/temba/flows/migrations/0349_alter_flowactivitycount_count_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 5.1.2 on 2024-12-04 18:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("flows", "0348_update_triggers"), + ] + + operations = [ + migrations.AlterField( + model_name="flowactivitycount", + name="count", + field=models.IntegerField(), + ), + migrations.AlterField( + model_name="flowcategorycount", + name="count", + field=models.IntegerField(), + ), + migrations.AlterField( + model_name="flownodecount", + name="count", + field=models.IntegerField(), + ), + migrations.AlterField( + model_name="flowpathcount", + name="count", + field=models.IntegerField(), + ), + migrations.AlterField( + model_name="flowrunstatuscount", + name="count", + field=models.IntegerField(), + ), + migrations.AlterField( + model_name="flowstartcount", + name="count", + field=models.IntegerField(), + ), + ] diff --git a/temba/flows/models.py b/temba/flows/models.py index 0b18e070772..aaaef831fb1 100644 --- a/temba/flows/models.py +++ b/temba/flows/models.py @@ -1,7 +1,10 @@ +import itertools import logging from array import array from collections import defaultdict -from datetime import datetime, timezone as tzone +from dataclasses import dataclass +from datetime import date, datetime, timezone as tzone +from uuid import UUID import iso8601 from django_redis import get_redis_connection @@ -9,6 +12,7 @@ from django.conf import settings from django.contrib.postgres.fields import ArrayField +from django.contrib.postgres.indexes import OpClass from django.db import models, transaction from django.db.models import Max, Prefetch, Q, Sum from django.db.models.functions import Lower, TruncDate @@ -24,9 +28,10 @@ from temba.orgs.models import DependencyMixin, Export, ExportType, Org, User from temba.templates.models import Template from temba.tickets.models import Topic -from temba.utils import analytics, chunk_list, json, on_transaction_commit, s3 +from 
temba.utils import analytics, json, on_transaction_commit, s3 from temba.utils.export.models import MultiSheetExporter -from temba.utils.models import JSONAsTextField, LegacyUUIDMixin, SquashableModel, TembaModel, delete_in_batches +from temba.utils.models import JSONAsTextField, LegacyUUIDMixin, TembaModel, delete_in_batches +from temba.utils.models.counts import BaseScopedCount, BaseSquashableCount from temba.utils.uuid import uuid4 from . import legacy @@ -61,14 +66,12 @@ class Flow(LegacyUUIDMixin, TembaModel, DependencyMixin): # items in metadata METADATA_RESULTS = "results" METADATA_DEPENDENCIES = "dependencies" - METADATA_WAITING_EXIT_UUIDS = "waiting_exit_uuids" METADATA_PARENT_REFS = "parent_refs" METADATA_IVR_RETRY = "ivr_retry" # items in the response from mailroom flow inspection INSPECT_RESULTS = "results" INSPECT_DEPENDENCIES = "dependencies" - INSPECT_WAITING_EXITS = "waiting_exits" INSPECT_PARENT_REFS = "parent_refs" INSPECT_ISSUES = "issues" @@ -112,7 +115,7 @@ class Flow(LegacyUUIDMixin, TembaModel, DependencyMixin): FINAL_LEGACY_VERSION = legacy.VERSIONS[-1] INITIAL_GOFLOW_VERSION = "13.0.0" # initial version of flow spec to use new engine - CURRENT_SPEC_VERSION = "13.5.0" # current flow spec version + CURRENT_SPEC_VERSION = "13.6.1" # current flow spec version EXPIRES_CHOICES = { TYPE_MESSAGE: ( @@ -388,47 +391,49 @@ def get_attrs(self): return {"icon": self.TYPE_ICONS.get(self.flow_type, "flow"), "type": self.flow_type, "uuid": self.uuid} def get_category_counts(self): - keys = [r["key"] for r in self.metadata["results"]] + # get the possible results from the flow metadata + results_by_key = {r["key"]: r for r in self.metadata["results"]} + counts = ( - FlowCategoryCount.objects.filter(flow_id=self.id) - .filter(result_key__in=keys) + self.category_counts.filter(result_key__in=results_by_key) .values("result_key", "category_name") - .annotate(count=Sum("count"), result_name=Max("result_name")) + .annotate(total=Sum("count")) ) - results = {} + # organize into dict of results keys to dicts of category names to counts + counts_by_key = defaultdict(dict) for count in counts: - key = count["result_key"] - result = results.get(key, {}) - if "name" not in result: - if count["category_name"] == "All Responses": - continue - result["key"] = key - result["name"] = count["result_name"] - result["categories"] = [dict(name=count["category_name"], count=count["count"])] - result["total"] = count["count"] - else: - result["categories"].append(dict(name=count["category_name"], count=count["count"])) - result["total"] += count["count"] - results[count["result_key"]] = result + counts_by_key[count["result_key"]][count["category_name"]] = count["total"] - for result_key, result_dict in results.items(): - for cat in result_dict["categories"]: - if result_dict["total"]: - cat["pct"] = float(cat["count"]) / float(result_dict["total"]) - else: - cat["pct"] = 0 + results = [] + for result_key, result in results_by_key.items(): + category_counts = counts_by_key.get(result_key, {}) - result_dict["categories"] = sorted(result_dict["categories"], key=lambda d: d["name"]) + # TODO maybe we shouldn't store All Responses in the first place + if not category_counts or (len(category_counts) == 1 and "All Responses" in category_counts): + continue + + result_total = sum(category_counts.values()) + result_categories = [] + for cat_name, cat_count in category_counts.items(): + result_categories.append( + { + "name": cat_name, + "count": cat_count, + "pct": (float(cat_count) / float(result_total)) if 
result_total else 0, + } + ) + + result_summary = { + "key": result_key, + "name": result["name"], + "categories": sorted(result_categories, key=lambda c: c["name"]), + "total": result_total, + } - # order counts by their place on the flow - result_list = [] - for key in keys: - result = results.get(key) - if result: - result_list.append(result) + results.append(result_summary) - return result_list + return results def lock(self): """ @@ -438,33 +443,28 @@ def lock(self): lock_key = FLOW_LOCK_KEY % (self.org_id, self.id) return r.lock(lock_key, FLOW_LOCK_TTL) - def get_node_counts(self): - """ - Gets the number of contacts at each node in the flow - """ - return FlowNodeCount.get_totals(self) - - def get_segment_counts(self): - """ - Gets the number of contacts to have taken each flow segment. - """ - return FlowPathCount.get_totals(self) - - def get_activity(self): + def get_activity(self) -> tuple: """ Get the activity summary for a flow as a tuple of the number of active runs at each step and a map of the previous visits """ - return self.get_node_counts(), self.get_segment_counts() - def is_starting(self): + counts = self.counts.prefix("node:").scope_totals() + by_node = {scope[5:]: count for scope, count in counts.items() if count} + + counts = self.counts.prefix("segment:").scope_totals() + by_segment = {scope[8:]: count for scope, count in counts.items() if count} + + return by_node, by_segment + + def get_active_start(self): """ Returns whether this flow is already being started by a user """ return ( - self.starts.filter(status__in=(FlowStart.STATUS_STARTING, FlowStart.STATUS_PENDING)) + self.starts.filter(status__in=(FlowStart.STATUS_PENDING, FlowStart.STATUS_STARTED)) .exclude(created_by=None) - .exists() + .first() ) def import_definition(self, user, definition, dependency_mapping): @@ -589,20 +589,30 @@ def update_single_message_flow(self, user, translations: dict, base_language: st self.save_revision(user, definition) + @classmethod + def prefetch_run_stats(cls, flows, *, using="default"): + FlowActivityCount.prefetch_by_scope(flows, prefix="status:", to_attr="_status_counts", using=using) + def get_run_stats(self): - totals_by_status = FlowRunStatusCount.get_totals(self) - total_runs = sum(totals_by_status.values()) - completed = totals_by_status.get(FlowRun.STATUS_COMPLETED, 0) + if hasattr(self, "_status_counts"): + counts = self._status_counts + else: + counts = self.counts.prefix("status:").scope_totals() + + by_status = {scope[7:]: count for scope, count in counts.items()} + + total_runs = sum(by_status.values()) + completed = by_status.get(FlowRun.STATUS_COMPLETED, 0) return { "total": total_runs, "status": { - "active": totals_by_status.get(FlowRun.STATUS_ACTIVE, 0), - "waiting": totals_by_status.get(FlowRun.STATUS_WAITING, 0), + "active": by_status.get(FlowRun.STATUS_ACTIVE, 0), + "waiting": by_status.get(FlowRun.STATUS_WAITING, 0), "completed": completed, - "expired": totals_by_status.get(FlowRun.STATUS_EXPIRED, 0), - "interrupted": totals_by_status.get(FlowRun.STATUS_INTERRUPTED, 0), - "failed": totals_by_status.get(FlowRun.STATUS_FAILED, 0), + "expired": by_status.get(FlowRun.STATUS_EXPIRED, 0), + "interrupted": by_status.get(FlowRun.STATUS_INTERRUPTED, 0), + "failed": by_status.get(FlowRun.STATUS_FAILED, 0), }, "completion": int(completed * 100 // total_runs) if total_runs else 0, } @@ -684,7 +694,6 @@ def get_metadata(cls, flow_info) -> dict: return { Flow.METADATA_RESULTS: flow_info[Flow.INSPECT_RESULTS], Flow.METADATA_DEPENDENCIES: 
flow_info[Flow.INSPECT_DEPENDENCIES],
-            Flow.METADATA_WAITING_EXIT_UUIDS: flow_info[Flow.INSPECT_WAITING_EXITS],
            Flow.METADATA_PARENT_REFS: flow_info[Flow.INSPECT_PARENT_REFS],
        }

@@ -879,6 +888,57 @@ def get_dependents(self):
            dependents["trigger"] = self.triggers.filter(is_active=True)
        return dependents

+    def get_engagement_start(self) -> date:
+        """
+        Gets earliest date of recorded engagement (i.e. messages in) for this flow.
+        """
+        first = self.counts.prefix("msgsin:date:").order_by("scope").first()
+        return date.fromisoformat(first.scope[12:]) if first else None
+
+    def get_engagement_by_date(self, truncate: str) -> list[tuple]:
+        """
+        Gets engagement counts (i.e. messages in) by date, truncated to the given precision (e.g. day, month).
+        """
+        dates = (
+            self.counts.prefix("msgsin:date:")
+            .extra({"date": f"date_trunc('{truncate}', split_part(scope, ':', 3)::date)"})
+            .values("date")
+            .annotate(count=Sum("count"))
+            .order_by("date")
+        )
+        return [(d["date"], d["count"]) for d in dates]
+
+    def get_engagement_by_weekday(self) -> dict[int, int]:
+        """
+        Gets engagement counts (i.e. messages in) by day of the week.
+        """
+
+        def parse(scope: str) -> int:
+            """
+            e.g. "msgsin:dow:3" -> 3, "msgsin:dow:7" -> 0
+            """
+            iso_dow = int(scope[11:])  # 1-7 Mon-Sun
+            return 0 if iso_dow == 7 else iso_dow  # 0-6 Sun-Sat
+
+        counts = self.counts.prefix("msgsin:dow:").scope_totals()
+        return {parse(scope): count for scope, count in counts.items()}
+
+    def get_engagement_by_hour(self, tz) -> dict[int, int]:
+        """
+        Gets engagement counts (i.e. messages in) by hour of the day.
+        """
+
+        offset = int(tz.utcoffset(timezone.now()).total_seconds() // 3600)  # hour counts are stored in UTC
+
+        def parse(scope: str) -> int:
+            """
+            e.g. "msgsin:hour:7" -> 7
+            """
+            return (int(scope[12:]) + offset) % 24
+
+        counts = self.counts.prefix("msgsin:hour:").scope_totals()
+        return {parse(scope): count for scope, count in counts.items()}
+
    def release(self, user, *, interrupt_sessions: bool = True):
        """
        Releases this flow, marking it inactive. We interrupt all flow runs in a background process.
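The engagement getters added above all decode the same "msgsin:" scope convention that the 0340 trigger and 0341 backfill write: one FlowActivityCount row per hour of day (in UTC), per ISO day of week, and per calendar date. A minimal sketch of that encoding, assuming an incoming message handled at a given moment (the msgsin_scopes helper is illustrative only, not part of this change):

    from datetime import datetime, timezone

    def msgsin_scopes(when: datetime) -> list[str]:
        # mirrors the format('msgsin:...') calls in the 0340 trigger:
        # hour is 0-23, isoweekday() matches Postgres isodow (1=Mon .. 7=Sun)
        return [
            f"msgsin:hour:{when.hour}",
            f"msgsin:dow:{when.isoweekday()}",
            f"msgsin:date:{when.date().isoformat()}",
        ]

    # msgsin_scopes(datetime(2024, 12, 16, 9, 30, tzinfo=timezone.utc))
    # -> ["msgsin:hour:9", "msgsin:dow:1", "msgsin:date:2024-12-16"]

get_engagement_by_weekday() then maps ISO day 7 back to 0 so results are Sunday-first, and get_engagement_by_hour() shifts the stored UTC hours into the org's timezone.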
@@ -913,6 +970,8 @@ def release(self, user, *, interrupt_sessions: bool = True): self.topic_dependencies.clear() self.user_dependencies.clear() + self.counts.all().delete() + # queue mailroom to interrupt sessions where contact is currently in this flow if interrupt_sessions: mailroom.queue_interrupt(self.org, flow=self) @@ -933,6 +992,7 @@ def delete(self): for start in self.starts.all(): start.delete() + delete_in_batches(self.counts.all()) delete_in_batches(self.category_counts.all()) delete_in_batches(self.path_counts.all()) delete_in_batches(self.node_counts.all()) @@ -1124,7 +1184,9 @@ class FlowRun(models.Model): results = JSONAsTextField(null=True, default=dict) # path taken by this run through the flow - path = JSONAsTextField(null=True, default=list) + path = JSONAsTextField(null=True, default=list) # to be replaced by path_nodes and path_times + path_nodes = ArrayField(models.UUIDField(), null=True) + path_times = ArrayField(models.DateTimeField(), null=True) # current node location of this run in the flow current_node_uuid = models.UUIDField(null=True) @@ -1132,20 +1194,28 @@ class FlowRun(models.Model): # set when deleting to signal to db triggers that result category counts should be decremented delete_from_results = models.BooleanField(null=True) + @dataclass + class Step: + node: UUID + time: datetime + + def get_path(self): + if self.path_nodes is not None and self.path_times is not None: + return [self.Step(node=n, time=t) for n, t in zip(self.path_nodes, self.path_times)] + else: + return [self.Step(node=UUID(s["node_uuid"]), time=iso8601.parse_date(s["arrived_on"])) for s in self.path] + def as_archive_json(self): from temba.api.v2.views import FlowRunReadSerializer - def convert_step(step): - return {"node": step["node_uuid"], "time": step["arrived_on"]} - def convert_result(result): return { "name": result.get("name"), "node": result.get("node_uuid"), "time": result["created_on"], - "input": result.get("input"), "value": result["value"], "category": result.get("category"), + "input": result.get("input"), } return { @@ -1154,7 +1224,7 @@ def convert_result(result): "flow": {"uuid": str(self.flow.uuid), "name": self.flow.name}, "contact": {"uuid": str(self.contact.uuid), "name": self.contact.name}, "responded": self.responded, - "path": [convert_step(s) for s in self.path], + "path": [{"node": str(s.node), "time": s.time.isoformat()} for s in self.get_path()], "values": {k: convert_result(r) for k, r in self.results.items()} if self.results else {}, "created_on": self.created_on.isoformat(), "modified_on": self.modified_on.isoformat(), @@ -1367,7 +1437,40 @@ def release(self): self.delete() -class FlowCategoryCount(SquashableModel): +class FlowActivityCount(BaseScopedCount): + """ + Flow-level counts of activity. 
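+
+    Counts are scoped by strings such as "status:<status>", "node:<node-uuid>",
+    "segment:<from-exit-uuid>:<to-node-uuid>" and "msgsin:{hour|dow|date}:<value>".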
+ """ + + squash_over = ("flow_id", "scope") + + flow = models.ForeignKey(Flow, on_delete=models.PROTECT, related_name="counts", db_index=False) # indexed below + + @classmethod + def prefetch_by_scope(cls, flows, *, prefix: str, to_attr: str, using: str): + counts = ( + cls.objects.using(using) + .filter(flow__in=flows) + .prefix(prefix) + .values_list("flow_id", "scope") + .annotate(total=Sum("count")) + ) + by_flow = defaultdict(dict) + for count in counts: + by_flow[count[0]][count[1]] = count[2] + + for flow in flows: + setattr(flow, to_attr, by_flow[flow.id]) + + class Meta: + indexes = [ + models.Index("flow", OpClass("scope", name="varchar_pattern_ops"), name="flowactivitycount_flow_scope"), + # for squashing task + models.Index(name="flowactivitycount_unsquashed", fields=("flow", "scope"), condition=Q(is_squashed=False)), + ] + + +class FlowCategoryCount(BaseSquashableCount): """ Maintains counts for categories across all possible results in a flow """ @@ -1386,11 +1489,8 @@ class FlowCategoryCount(SquashableModel): # the name of the category category_name = models.CharField(max_length=128) - # the number of results with this category - count = models.IntegerField(default=0) - @classmethod - def get_squash_query(cls, distinct_set): + def get_squash_query(cls, distinct_set: dict) -> tuple: sql = """ WITH removed as ( DELETE FROM %(table)s WHERE "id" IN ( @@ -1406,11 +1506,11 @@ def get_squash_query(cls, distinct_set): } params = ( - distinct_set.flow_id, - distinct_set.node_uuid, - distinct_set.result_key, - distinct_set.result_name, - distinct_set.category_name, + distinct_set["flow_id"], + distinct_set["node_uuid"], + distinct_set["result_key"], + distinct_set["result_name"], + distinct_set["category_name"], ) * 2 return sql, params @@ -1427,48 +1527,16 @@ class Meta: ] -class FlowPathCount(SquashableModel): +class FlowPathCount(BaseSquashableCount): """ - Maintains hourly counts of flow paths + TODO drop """ - squash_over = ("flow_id", "from_uuid", "to_uuid", "period") - flow = models.ForeignKey(Flow, on_delete=models.PROTECT, related_name="path_counts") - - # the exit UUID of the node this path segment starts with from_uuid = models.UUIDField() - - # the UUID of the node this path segment ends with to_uuid = models.UUIDField() - - # the hour in which this activity occurred period = models.DateTimeField() - # the number of runs that tooks this path segment in that period - count = models.IntegerField(default=0) - - @classmethod - def get_squash_query(cls, distinct_set): - sql = """ - WITH removed as ( - DELETE FROM %(table)s WHERE "flow_id" = %%s AND "from_uuid" = %%s AND "to_uuid" = %%s AND "period" = date_trunc('hour', %%s) RETURNING "count" - ) - INSERT INTO %(table)s("flow_id", "from_uuid", "to_uuid", "period", "count", "is_squashed") - VALUES (%%s, %%s, %%s, date_trunc('hour', %%s), GREATEST(0, (SELECT SUM("count") FROM removed)), TRUE); - """ % { - "table": cls._meta.db_table - } - - params = (distinct_set.flow_id, distinct_set.from_uuid, distinct_set.to_uuid, distinct_set.period) * 2 - return sql, params - - @classmethod - def get_totals(cls, flow): - counts = cls.objects.filter(flow=flow) - totals = list(counts.values_list("from_uuid", "to_uuid").annotate(replies=Sum("count"))) - return {"%s:%s" % (t[0], t[1]): t[2] for t in totals} - class Meta: indexes = [ models.Index( @@ -1479,40 +1547,14 @@ class Meta: ] -class FlowNodeCount(SquashableModel): +class FlowNodeCount(BaseSquashableCount): """ - Maintains counts of unique contacts at each flow node. 
+    TODO drop
    """

-    squash_over = ("node_uuid",)
-
    flow = models.ForeignKey(Flow, on_delete=models.PROTECT, related_name="node_counts")
-
-    # the UUID of the node
    node_uuid = models.UUIDField(db_index=True)

-    # the number of contacts/runs currently at that node
-    count = models.IntegerField(default=0)
-
-    @classmethod
-    def get_squash_query(cls, distinct_set):
-        sql = """
-            WITH removed as (
-                DELETE FROM %(table)s WHERE "node_uuid" = %%s RETURNING "count"
-            )
-            INSERT INTO %(table)s("flow_id", "node_uuid", "count", "is_squashed")
-            VALUES (%%s, %%s, GREATEST(0, (SELECT SUM("count") FROM removed)), TRUE);
-        """ % {
-            "table": cls._meta.db_table
-        }
-
-        return sql, (distinct_set.node_uuid, distinct_set.flow_id, distinct_set.node_uuid)
-
-    @classmethod
-    def get_totals(cls, flow):
-        totals = list(cls.objects.filter(flow=flow).values_list("node_uuid").annotate(replies=Sum("count")))
-        return {str(t[0]): t[1] for t in totals if t[1]}
-
    class Meta:
        indexes = [
            models.Index(
@@ -1521,38 +1563,17 @@ class Meta:
        ]


-class FlowRunStatusCount(SquashableModel):
+class FlowRunStatusCount(BaseSquashableCount):
    """
-    Maintains counts of different statuses of flow runs for all flows. These are inserted via triggers on the database.
+    TODO drop
    """

-    squash_over = ("flow_id", "status")
-
    flow = models.ForeignKey(Flow, on_delete=models.PROTECT, related_name="status_counts")
    status = models.CharField(max_length=1, choices=FlowRun.STATUS_CHOICES)
-    count = models.IntegerField(default=0)
-
-    @classmethod
-    def get_squash_query(cls, distinct_set):
-        sql = r"""
-        WITH removed as (
-            DELETE FROM flows_flowrunstatuscount WHERE "flow_id" = %s AND "status" = %s RETURNING "count"
-        )
-        INSERT INTO flows_flowrunstatuscount("flow_id", "status", "count", "is_squashed")
-        VALUES (%s, %s, GREATEST(0, (SELECT SUM("count") FROM removed)), TRUE);
-        """
-
-        return sql, (distinct_set.flow_id, distinct_set.status) * 2
-
-    @classmethod
-    def get_totals(cls, flow):
-        totals = list(cls.objects.filter(flow=flow).values_list("status").annotate(total=Sum("count")))
-        return {t[0]: t[1] for t in totals}

    class Meta:
        indexes = [
            models.Index(fields=("flow", "status")),
-            # for squashing task
            models.Index(name="flowrun_count_unsquashed", fields=("flow", "status"), condition=Q(is_squashed=False)),
        ]

@@ -1678,7 +1699,7 @@ def _get_run_batches(self, export, start_date, end_date, flows, responded_only:
        )

        seen = set()
-        for record_batch in chunk_list(records, 1000):
+        for record_batch in itertools.batched(records, 1000):
            matching = []
            for record in record_batch:
                seen.add(record["id"])
@@ -1699,7 +1720,7 @@ def _get_run_batches(self, export, start_date, end_date, flows, responded_only:
            f"Results export #{export.id} for org #{export.org.id}: found {len(run_ids)} runs in database to export"
        )

-        for id_batch in chunk_list(run_ids, 1000):
+        for id_batch in itertools.batched(run_ids, 1000):
            run_batch = (
                FlowRun.objects.filter(id__in=id_batch)
                .order_by("modified_on", "id")
@@ -1791,15 +1812,19 @@ class FlowStart(models.Model):
    EXCLUSION_STARTED_PREVIOUSLY = "started_previously"  # contacts been in this flow in the last 90 days
    EXCLUSION_NOT_SEEN_SINCE_DAYS = "not_seen_since_days"  # contacts not seen for more than this number of days

-    STATUS_PENDING = "P"
-    STATUS_STARTING = "S"
-    STATUS_COMPLETE = "C"
+    STATUS_PENDING = "P"  # exists in the database
+    STATUS_QUEUED = "Q"  # batch tasks created, contact_count set
+    STATUS_STARTED = "S"  # first batch task started
+    STATUS_COMPLETED = "C"  # last batch task completed
    STATUS_FAILED = "F"
+    STATUS_INTERRUPTED
= "I" STATUS_CHOICES = ( - (STATUS_PENDING, _("Pending")), - (STATUS_STARTING, _("Starting")), - (STATUS_COMPLETE, _("Complete")), - (STATUS_FAILED, _("Failed")), + (STATUS_PENDING, "Pending"), + (STATUS_QUEUED, "Queued"), + (STATUS_STARTED, "Started"), + (STATUS_COMPLETED, "Completed"), + (STATUS_FAILED, "Failed"), + (STATUS_INTERRUPTED, "Interrupted"), ) TYPE_MANUAL = "M" @@ -1819,7 +1844,8 @@ class FlowStart(models.Model): org = models.ForeignKey(Org, on_delete=models.PROTECT, related_name="flow_starts") flow = models.ForeignKey(Flow, on_delete=models.PROTECT, related_name="starts") start_type = models.CharField(max_length=1, choices=TYPE_CHOICES) - status = models.CharField(max_length=1, default=STATUS_PENDING, choices=STATUS_CHOICES) + status = models.CharField(max_length=1, choices=STATUS_CHOICES, default=STATUS_PENDING) + contact_count = models.IntegerField(default=0, null=True) # null until status is QUEUED # who to start groups = models.ManyToManyField(ContactGroup) @@ -1828,9 +1854,6 @@ class FlowStart(models.Model): query = models.TextField(null=True) exclusions = models.JSONField(default=dict, null=True) - # the number of de-duped contacts that might be started, depending on options above - contact_count = models.IntegerField(default=0, null=True) - campaign_event = models.ForeignKey( "campaigns.CampaignEvent", null=True, on_delete=models.PROTECT, related_name="flow_starts" ) @@ -1840,10 +1863,9 @@ class FlowStart(models.Model): parent_summary = models.JSONField(null=True) session_history = models.JSONField(null=True) - created_by = models.ForeignKey( - settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT, related_name="flow_starts" - ) + created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT, null=True, related_name="+") created_on = models.DateTimeField(default=timezone.now) + modified_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT, null=True, related_name="+") modified_on = models.DateTimeField(default=timezone.now) @classmethod @@ -1891,9 +1913,25 @@ def preview(cls, flow, *, include: mailroom.Inclusions, exclude: mailroom.Exclus return preview.query, preview.total + def is_starting(self) -> bool: + return self.status in (self.STATUS_PENDING, self.STATUS_STARTED) + + @classmethod + def has_unfinished(cls, org) -> bool: + return org.flow_starts.filter(status__in=(cls.STATUS_PENDING, cls.STATUS_STARTED)).exists() + def async_start(self): on_transaction_commit(lambda: mailroom.queue_flow_start(self)) + def interrupt(self, user): + """ + Interrupts this flow start + """ + + self.status = self.STATUS_INTERRUPTED + self.modified_by = user + self.save(update_fields=("status", "modified_by", "modified_on")) + def delete(self): """ Deletes this flow start - called during org deletion or trimming task. @@ -1908,8 +1946,8 @@ def delete(self): super().delete() - def __str__(self): # pragma: no cover - return f"FlowStart[id={self.id}, flow={self.flow.uuid}]" + def __repr__(self): + return f'' class Meta: indexes = [ @@ -1930,7 +1968,7 @@ class Meta: ] -class FlowStartCount(SquashableModel): +class FlowStartCount(BaseSquashableCount): """ Maintains count of how many runs a FlowStart has created. 
""" @@ -1938,25 +1976,10 @@ class FlowStartCount(SquashableModel): squash_over = ("start_id",) start = models.ForeignKey(FlowStart, on_delete=models.PROTECT, related_name="counts", db_index=True) - count = models.IntegerField(default=0) - - @classmethod - def get_squash_query(cls, distinct_set): - sql = """ - WITH deleted as ( - DELETE FROM %(table)s WHERE "start_id" = %%s RETURNING "count" - ) - INSERT INTO %(table)s("start_id", "count", "is_squashed") - VALUES (%%s, GREATEST(0, (SELECT SUM("count") FROM deleted)), TRUE); - """ % { - "table": cls._meta.db_table - } - - return sql, (distinct_set.start_id,) * 2 @classmethod def get_count(cls, start): - return cls.sum(start.counts.all()) + return start.counts.all().sum() @classmethod def bulk_annotate(cls, starts): diff --git a/temba/flows/tasks.py b/temba/flows/tasks.py index b04742e05e7..a53036d043a 100644 --- a/temba/flows/tasks.py +++ b/temba/flows/tasks.py @@ -1,3 +1,4 @@ +import itertools import logging from collections import defaultdict from datetime import datetime, timedelta, timezone as tzone @@ -11,21 +12,10 @@ from django.utils.timesince import timesince from temba import mailroom -from temba.utils import chunk_list from temba.utils.crons import cron_task from temba.utils.models import delete_in_batches -from .models import ( - Flow, - FlowCategoryCount, - FlowNodeCount, - FlowPathCount, - FlowRevision, - FlowRun, - FlowRunStatusCount, - FlowSession, - FlowStartCount, -) +from .models import Flow, FlowActivityCount, FlowCategoryCount, FlowRevision, FlowRun, FlowSession, FlowStartCount logger = logging.getLogger(__name__) @@ -39,18 +29,20 @@ def update_session_wait_expires(flow_id): flow = Flow.objects.get(id=flow_id) session_ids = flow.sessions.filter(status=FlowSession.STATUS_WAITING).values_list("id", flat=True) - for id_batch in chunk_list(session_ids, 1000): + for id_batch in itertools.batched(session_ids, 1000): batch = FlowSession.objects.filter(id__in=id_batch) batch.update(wait_expires_on=F("wait_started_on") + timedelta(minutes=flow.expires_after_minutes)) +@cron_task(lock_timeout=7200) +def squash_activity_counts(): + FlowActivityCount.squash() + + @cron_task(lock_timeout=7200) def squash_flow_counts(): - FlowNodeCount.squash() - FlowRunStatusCount.squash() FlowCategoryCount.squash() FlowStartCount.squash() - FlowPathCount.squash() @cron_task() @@ -93,7 +85,7 @@ def interrupt_flow_sessions(): by_org[session.org].append(session) for org, sessions in by_org.items(): - for batch in chunk_list(sessions, 100): + for batch in itertools.batched(sessions, 100): mailroom.queue_interrupt(org, sessions=batch) num_interrupted += len(sessions) diff --git a/temba/flows/tests.py b/temba/flows/tests.py deleted file mode 100644 index 0d267f37bda..00000000000 --- a/temba/flows/tests.py +++ /dev/null @@ -1,5370 +0,0 @@ -import decimal -import io -from datetime import date, datetime, timedelta, timezone as tzone -from unittest.mock import patch - -from django_redis import get_redis_connection -from openpyxl import load_workbook - -from django.core.files.storage import default_storage -from django.db.models.functions import TruncDate -from django.test.utils import override_settings -from django.urls import reverse -from django.utils import timezone - -from temba import mailroom -from temba.api.models import Resthook -from temba.archives.models import Archive -from temba.campaigns.models import Campaign, CampaignEvent -from temba.classifiers.models import Classifier -from temba.contacts.models import URN, Contact, ContactField, 
ContactGroup, ContactURN -from temba.globals.models import Global -from temba.orgs.integrations.dtone import DTOneType -from temba.orgs.models import Export -from temba.templates.models import TemplateTranslation -from temba.tests import CRUDLTestMixin, MockJsonResponse, TembaTest, matchers, mock_mailroom, override_brand -from temba.tests.base import get_contact_search -from temba.tests.engine import MockSessionWriter -from temba.triggers.models import Trigger -from temba.utils import json, s3 -from temba.utils.uuid import uuid4 -from temba.utils.views import TEMBA_MENU_SELECTION - -from .checks import mailroom_url -from .models import ( - Flow, - FlowCategoryCount, - FlowLabel, - FlowNodeCount, - FlowPathCount, - FlowRevision, - FlowRun, - FlowRunStatusCount, - FlowSession, - FlowStart, - FlowStartCount, - FlowUserConflictException, - FlowVersionConflictException, - ResultsExport, -) -from .tasks import ( - interrupt_flow_sessions, - squash_flow_counts, - trim_flow_revisions, - trim_flow_sessions, - update_session_wait_expires, -) -from .views import FlowCRUDL - - -class FlowTest(TembaTest, CRUDLTestMixin): - def setUp(self): - super().setUp() - - self.contact = self.create_contact("Eric", phone="+250788382382") - self.contact2 = self.create_contact("Nic", phone="+250788383383") - self.contact3 = self.create_contact("Norbert", phone="+250788123456") - self.contact4 = self.create_contact("Teeh", phone="+250788123457", language="por") - - self.other_group = self.create_group("Other", []) - - def test_get_unique_name(self): - self.assertEqual("Testing", Flow.get_unique_name(self.org, "Testing")) - - # ensure checking against existing flows is case-insensitive - testing = self.create_flow("TESTING") - - self.assertEqual("Testing 2", Flow.get_unique_name(self.org, "Testing")) - self.assertEqual("Testing", Flow.get_unique_name(self.org, "Testing", ignore=testing)) - self.assertEqual("Testing", Flow.get_unique_name(self.org2, "Testing")) # different org - - self.create_flow("Testing 2") - - self.assertEqual("Testing 3", Flow.get_unique_name(self.org, "Testing")) - - # ensure we don't exceed the name length limit - self.create_flow("X" * 64) - - self.assertEqual(f"{'X' * 62} 2", Flow.get_unique_name(self.org, "X" * 64)) - - def test_clean_name(self): - self.assertEqual("Hello", Flow.clean_name("Hello\0")) - self.assertEqual("Hello/n", Flow.clean_name("Hello\\n")) - self.assertEqual("Say 'Hi'", Flow.clean_name('Say "Hi"')) - self.assertEqual("x" * 64, Flow.clean_name("x" * 100)) - self.assertEqual("a b", Flow.clean_name(f"a{' ' * 32}b{' ' * 32}c")) - - @patch("temba.mailroom.queue_interrupt") - def test_archive(self, mock_queue_interrupt): - flow = self.get_flow("color") - flow.archive(self.admin) - - mock_queue_interrupt.assert_called_once_with(self.org, flow=flow) - - flow.refresh_from_db() - self.assertEqual(flow.is_archived, True) - self.assertEqual(flow.is_active, True) - - @patch("temba.mailroom.queue_interrupt") - def test_release(self, mock_queue_interrupt): - global1 = Global.get_or_create(self.org, self.admin, "api_key", "API Key", "234325") - flow = self.create_flow("Test") - flow.global_dependencies.add(global1) - - flow.release(self.admin) - - mock_queue_interrupt.assert_called_once_with(self.org, flow=flow) - - flow.refresh_from_db() - self.assertTrue(flow.name.startswith("deleted-")) - self.assertFalse(flow.is_archived) - self.assertFalse(flow.is_active) - self.assertEqual(0, flow.global_dependencies.count()) - - def test_get_definition(self): - favorites = 
self.get_flow("favorites_v13") - - # fill the definition with junk metadata - rev = favorites.get_current_revision() - rev.definition["uuid"] = "Nope" - rev.definition["name"] = "Not the name" - rev.definition["revision"] = 1234567 - rev.definition["expire_after_minutes"] = 7654 - rev.save(update_fields=("definition",)) - - # definition should use values from flow db object - definition = favorites.get_definition() - self.assertEqual(definition["uuid"], str(favorites.uuid)) - self.assertEqual(definition["name"], "Favorites") - self.assertEqual(definition["revision"], 1) - self.assertEqual(definition["expire_after_minutes"], 720) - - # when saving a new revision we overwrite metadata - favorites.save_revision(self.admin, rev.definition) - rev = favorites.get_current_revision() - self.assertEqual(rev.definition["uuid"], str(favorites.uuid)) - self.assertEqual(rev.definition["name"], "Favorites") - self.assertEqual(rev.definition["revision"], 2) - self.assertEqual(rev.definition["expire_after_minutes"], 720) - - # can't get definition of a flow with no revisions - favorites.revisions.all().delete() - self.assertRaises(AssertionError, favorites.get_definition) - - def test_ensure_current_version(self): - # importing migrates to latest spec version - flow = self.get_flow("favorites_v13") - self.assertEqual("13.5.0", flow.version_number) - self.assertEqual(1, flow.revisions.count()) - - # rewind one spec version.. - flow.version_number = "13.0.0" - flow.save(update_fields=("version_number",)) - rev = flow.revisions.get() - rev.definition["spec_version"] = "13.0.0" - rev.spec_version = "13.0.0" - rev.save() - - old_modified_on = flow.modified_on - old_saved_on = flow.saved_on - - flow.ensure_current_version() - - # check we migrate to current spec version - self.assertEqual("13.5.0", flow.version_number) - self.assertEqual(2, flow.revisions.count()) - self.assertEqual("system", flow.revisions.order_by("id").last().created_by.username) - - # saved on won't have been updated but modified on will - self.assertEqual(old_saved_on, flow.saved_on) - self.assertGreater(flow.modified_on, old_modified_on) - - def test_flow_archive_with_campaign(self): - self.login(self.admin) - self.get_flow("the_clinic") - - campaign = Campaign.objects.get(name="Appointment Schedule") - flow = Flow.objects.get(name="Confirm Appointment") - - campaign_event = CampaignEvent.objects.filter(flow=flow, campaign=campaign).first() - self.assertIsNotNone(campaign_event) - - # do not archive if the campaign is active - Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) - - flow.refresh_from_db() - self.assertFalse(flow.is_archived) - - campaign.is_archived = True - campaign.save() - - # can archive if the campaign is archived - Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) - - flow.refresh_from_db() - self.assertTrue(flow.is_archived) - - campaign.is_archived = False - campaign.save() - - flow.is_archived = False - flow.save() - - campaign_event.is_active = False - campaign_event.save() - - # can archive if the campaign is not archived with no active event - Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) - - flow.refresh_from_db() - self.assertTrue(flow.is_archived) - - def test_editor(self): - flow = self.get_flow("color") - - self.login(self.admin) - - flow_editor_url = reverse("flows.flow_editor", args=[flow.uuid]) - - response = self.client.get(flow_editor_url) - - self.assertTrue(response.context["mutable"]) - self.assertTrue(response.context["can_start"]) - 
self.assertTrue(response.context["can_simulate"]) - self.assertContains(response, reverse("flows.flow_simulate", args=[flow.id])) - self.assertContains(response, 'id="rp-flow-editor"') - - # flows that are archived can't be edited, started or simulated - self.login(self.admin) - - flow.is_archived = True - flow.save(update_fields=("is_archived",)) - - response = self.client.get(flow_editor_url) - - self.assertFalse(response.context["mutable"]) - self.assertFalse(response.context["can_start"]) - self.assertFalse(response.context["can_simulate"]) - - def test_editor_feature_filters(self): - flow = self.create_flow("Test") - - self.login(self.admin) - - def assert_features(features: set): - response = self.client.get(reverse("flows.flow_editor", args=[flow.uuid])) - self.assertEqual(features, set(json.loads(response.context["feature_filters"]))) - - # add a resthook - Resthook.objects.create(org=flow.org, created_by=self.admin, modified_by=self.admin) - assert_features({"resthook"}) - - # add an NLP classifier - Classifier.objects.create(org=flow.org, config="", created_by=self.admin, modified_by=self.admin) - assert_features({"classifier", "resthook"}) - - # add a DT One integration - DTOneType().connect(flow.org, self.admin, "login", "token") - assert_features({"airtime", "classifier", "resthook"}) - - # change our channel to use a whatsapp scheme - self.channel.schemes = [URN.WHATSAPP_SCHEME] - self.channel.save() - assert_features({"whatsapp", "airtime", "classifier", "resthook"}) - - # change our channel to use a facebook scheme - self.channel.schemes = [URN.FACEBOOK_SCHEME] - self.channel.save() - assert_features({"facebook", "optins", "airtime", "classifier", "resthook"}) - - self.setUpLocations() - - assert_features({"facebook", "optins", "airtime", "classifier", "resthook", "locations"}) - - def test_save_revision(self): - self.login(self.admin) - self.client.post( - reverse("flows.flow_create"), {"name": "Go Flow", "flow_type": Flow.TYPE_MESSAGE, "base_language": "eng"} - ) - flow = Flow.objects.get( - org=self.org, name="Go Flow", flow_type=Flow.TYPE_MESSAGE, version_number=Flow.CURRENT_SPEC_VERSION - ) - - # can't save older spec version over newer - definition = flow.revisions.order_by("id").last().definition - definition["spec_version"] = Flow.FINAL_LEGACY_VERSION - - with self.assertRaises(FlowVersionConflictException): - flow.save_revision(self.admin, definition) - - # can't save older revision over newer - definition["spec_version"] = Flow.CURRENT_SPEC_VERSION - definition["revision"] = 0 - - with self.assertRaises(FlowUserConflictException): - flow.save_revision(self.admin, definition) - - def test_clone(self): - flow = self.create_flow("123456789012345678901234567890123456789012345678901234567890") # 60 chars - flow.expires_after_minutes = 60 - flow.save(update_fields=("expires_after_minutes",)) - - copy1 = flow.clone(self.admin) - - self.assertNotEqual(flow.id, copy1.id) - self.assertEqual(60, copy1.expires_after_minutes) - - # name should start with "Copy of" and be truncated to 64 chars - self.assertEqual("Copy of 12345678901234567890123456789012345678901234567890123456", copy1.name) - - # cloning again should generate a unique name - copy2 = flow.clone(self.admin) - self.assertEqual("Copy of 123456789012345678901234567890123456789012345678901234 2", copy2.name) - copy3 = flow.clone(self.admin) - self.assertEqual("Copy of 123456789012345678901234567890123456789012345678901234 3", copy3.name) - - # ensure that truncating doesn't leave trailing spaces - flow2 = 
self.create_flow("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabc efghijkl") - copy2 = flow2.clone(self.admin) - self.assertEqual("Copy of abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabc", copy2.name) - - def test_copy_group_split_no_name(self): - flow = self.get_flow("group_split_no_name") - flow_def = flow.get_definition() - - copy = flow.clone(self.admin) - copy_def = copy.get_definition() - - self.assertEqual(len(copy_def["nodes"]), 1) - self.assertEqual(len(copy_def["nodes"][0]["router"]["cases"]), 1) - self.assertEqual( - copy_def["nodes"][0]["router"]["cases"][0], - { - "uuid": matchers.UUID4String(), - "type": "has_group", - "arguments": [matchers.UUID4String()], - "category_uuid": matchers.UUID4String(), - }, - ) - - # check that the original and the copy reference the same group - self.assertEqual( - flow_def["nodes"][0]["router"]["cases"][0]["arguments"], - copy_def["nodes"][0]["router"]["cases"][0]["arguments"], - ) - - def test_activity(self): - flow = self.get_flow("favorites_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_other = flow_nodes[1] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - name_prompt = flow_nodes[6] - name_split = flow_nodes[7] - end_prompt = flow_nodes[8] - - # we don't know this shade of green, it should route us to the beginning again - session1 = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "chartreuse")) - .set_result("color", "chartreuse", category="Other", input="chartreuse") - .visit(color_other) - .send_msg("I don't know that color. Try again.") - .visit(color_split) - .wait() - .save() - ) - - self.assertEqual({color_split["uuid"]: 1}, FlowNodeCount.get_totals(flow)) - - (active, visited) = flow.get_activity() - - self.assertEqual({color_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 1, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - }, - visited, - ) - self.assertEqual( - { - "total": 1, - "status": {"active": 0, "waiting": 1, "completed": 0, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - # another unknown color, that'll route us right back again - # the active stats will look the same, but there should be one more journey on the path - ( - session1.resume(msg=self.create_incoming_msg(self.contact, "mauve")) - .set_result("color", "mauve", category="Other", input="mauve") - .visit(color_other) - .send_msg("I don't know that color. Try again.") - .visit(color_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({color_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 2, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 2, - }, - visited, - ) - - # this time a color we know takes us elsewhere, activity will move - # to another node, but still just one entry - ( - session1.resume(msg=self.create_incoming_msg(self.contact, "blue")) - .set_result("color", "blue", category="Blue", input="blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! 
What is your favorite beer?") - .visit(beer_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({beer_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 2, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 2, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 1, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 1, - }, - visited, - ) - - # a new participant, showing distinct active counts and incremented path - ryan = self.create_contact("Ryan Lewis", phone="+12065550725") - session2 = ( - MockSessionWriter(ryan, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(ryan, "burnt sienna")) - .set_result("color", "burnt sienna", category="Other", input="burnt sienna") - .visit(color_other) - .send_msg("I don't know that color. Try again.") - .visit(color_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({color_split["uuid"]: 1, beer_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 2, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 3, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 3, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 1, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 1, - }, - visited, - ) - self.assertEqual( - { - "total": 2, - "status": {"active": 0, "waiting": 2, "completed": 0, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - # now let's have them land in the same place - ( - session2.resume(msg=self.create_incoming_msg(ryan, "blue")) - .set_result("color", "blue", category="Blue", input="blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! What is your favorite beer?") - .visit(beer_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({beer_split["uuid"]: 2}, active) - - # now move our first contact forward to the end - ( - session1.resume(msg=self.create_incoming_msg(self.contact, "Turbo King")) - .visit(name_prompt, exit_index=2) - .send_msg("Mmmmm... delicious Turbo King. 
Lastly, what is your name?") - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "Ben Haggerty")) - .visit(end_prompt) - .complete() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({beer_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 2, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 3, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 3, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 2, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 2, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 1, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 1, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 1, - }, - visited, - ) - - # half of our flows are now complete - self.assertEqual( - { - "total": 2, - "status": {"active": 0, "waiting": 1, "completed": 1, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 50, - }, - flow.get_run_stats(), - ) - - # check squashing doesn't change anything - squash_flow_counts() - - (active, visited) = flow.get_activity() - - self.assertEqual({beer_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 2, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 3, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 3, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 2, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 2, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 1, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 1, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 1, - }, - visited, - ) - self.assertEqual( - { - "total": 2, - "status": {"active": 0, "waiting": 1, "completed": 1, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 50, - }, - flow.get_run_stats(), - ) - self.assertEqual( - [ - { - "categories": [ - {"count": 2, "name": "Blue", "pct": 1.0}, - {"count": 0, "name": "Other", "pct": 0.0}, - ], - "key": "color", - "name": "color", - "total": 2, - } - ], - flow.get_category_counts(), - ) - - # now let's delete our contact, we'll still have one active node, but - # our visit path counts will go down by two since he went there twice - self.contact.release(self.user) - - (active, visited) = flow.get_activity() - - self.assertEqual({beer_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 1, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 1, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 1, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 0, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 0, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 0, - }, - visited, - ) - self.assertEqual( - { - "total": 1, - "status": {"active": 0, "waiting": 1, "completed": 0, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - self.assertEqual( - [ - { - "categories": [ - {"count": 1, "name": "Blue", "pct": 1.0}, - {"count": 0, "name": "Other", "pct": 0.0}, - ], - "key": "color", - "name": "color", - "total": 1, - } - ], - flow.get_category_counts(), - ) - - # advance ryan to the end to make sure our percentage 
accounts for one less contact - ( - session2.resume(msg=self.create_incoming_msg(ryan, "Turbo King")) - .visit(name_prompt, exit_index=2) - .send_msg("Mmmmm... delicious Turbo King. Lastly, what is your name?") - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(ryan, "Ryan Lewis")) - .visit(end_prompt) - .complete() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 1, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 1, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 1, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 1, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 1, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 1, - }, - visited, - ) - self.assertEqual( - { - "total": 1, - "status": {"active": 0, "waiting": 0, "completed": 1, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 100, - }, - flow.get_run_stats(), - ) - - # delete our last contact to make sure activity is gone without first expiring, zeros abound - ryan.release(self.admin) - - (active, visited) = flow.get_activity() - - self.assertEqual({}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 0, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 0, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 0, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 0, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 0, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 0, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 0, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 0, - }, - visited, - ) - self.assertEqual( - { - "total": 0, - "status": {"active": 0, "waiting": 0, "completed": 0, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - self.assertEqual( - [ - { - "categories": [ - {"count": 0, "name": "Blue", "pct": 0.0}, - {"count": 0, "name": "Other", "pct": 0.0}, - ], - "key": "color", - "name": "color", - "total": 0, - } - ], - flow.get_category_counts(), - ) - - # runs all gone too - self.assertEqual(0, FlowRun.objects.filter(flow=flow).count()) - - # test that expirations don't change activity... start another contact in the flow - tupac = self.create_contact("Tupac Shakur", phone="+12065550725") - ( - MockSessionWriter(tupac, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(tupac, "azul")) - .visit(color_other) - .send_msg("I don't know that color. 
Try again.") - .visit(color_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({color_split["uuid"]: 1}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 1, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 0, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 0, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 0, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 0, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 0, - }, - visited, - ) - self.assertEqual( - { - "total": 1, - "status": {"active": 0, "waiting": 1, "completed": 0, "expired": 0, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - # now mark run has expired and make sure exit type counts updated - run = tupac.runs.get() - run.status = FlowRun.STATUS_EXPIRED - run.exited_on = timezone.now() - run.save(update_fields=("status", "exited_on")) - - (active, visited) = flow.get_activity() - - self.assertEqual({}, active) - self.assertEqual( - { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][-1]["uuid"]}:{color_other["uuid"]}': 1, - f'{color_other["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 0, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 0, - f'{beer_split["exits"][2]["uuid"]}:{name_prompt["uuid"]}': 0, - f'{name_prompt["exits"][0]["uuid"]}:{name_split["uuid"]}': 0, - f'{name_split["exits"][0]["uuid"]}:{end_prompt["uuid"]}': 0, - }, - visited, - ) - self.assertEqual( - { - "total": 1, - "status": {"active": 0, "waiting": 0, "completed": 0, "expired": 1, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - # check that flow interruption counts properly - jimmy = self.create_contact("Jimmy Graham", phone="+12065558888") - ( - MockSessionWriter(jimmy, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(jimmy, "cyan")) - .visit(color_other) - .send_msg("I don't know that color. 
Try again.") - .visit(color_split) - .wait() - .save() - ) - - (active, visited) = flow.get_activity() - - self.assertEqual({color_split["uuid"]: 1}, active) - self.assertEqual( - { - "total": 2, - "status": {"active": 0, "waiting": 1, "completed": 0, "expired": 1, "interrupted": 0, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - run = jimmy.runs.get() - run.status = FlowRun.STATUS_INTERRUPTED - run.exited_on = timezone.now() - run.save(update_fields=("status", "exited_on")) - - (active, visited) = flow.get_activity() - - self.assertEqual({}, active) - self.assertEqual( - { - "total": 2, - "status": {"active": 0, "waiting": 0, "completed": 0, "expired": 1, "interrupted": 1, "failed": 0}, - "completion": 0, - }, - flow.get_run_stats(), - ) - - def test_category_counts(self): - def assertCount(counts, result_key, category_name, truth): - found = False - for count in counts: - if count["key"] == result_key: - categories = count["categories"] - for category in categories: - if category["name"] == category_name: - found = True - self.assertEqual(category["count"], truth) - self.assertTrue(found) - - favorites = self.get_flow("favorites_v13") - flow_nodes = favorites.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_other = flow_nodes[1] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - name_prompt = flow_nodes[6] - name_split = flow_nodes[7] - - # add in some fake data - for i in range(0, 10): - contact = self.create_contact("Contact %d" % i, phone="+120655530%d" % i) - ( - MockSessionWriter(contact, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "blue")) - .set_result("Color", "blue", "Blue", "blue") - .visit(beer_prompt) - .send_msg("Good choice, I like Blue too! What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "primus")) - .set_result("Beer", "primus", "Primus", "primus") - .visit(name_prompt) - .send_msg("Lastly, what is your name?", self.channel) - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "russell")) - .set_result("Name", "russell", "All Responses", "russell") - .complete() - .save() - ) - - for i in range(0, 5): - contact = self.create_contact("Contact %d" % i, phone="+120655531%d" % i) - ( - MockSessionWriter(contact, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "red")) - .set_result("Color", "red", "Red", "red") - .visit(beer_prompt) - .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "primus")) - .set_result("Beer", "primus", "Primus", "primus") - .visit(name_prompt) - .send_msg("Lastly, what is your name?", self.channel) - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "earl")) - .set_result("Name", "earl", "All Responses", "earl") - .complete() - .save() - ) - - # test update flow values - for i in range(0, 5): - contact = self.create_contact("Contact %d" % i, phone="+120655532%d" % i) - ( - MockSessionWriter(contact, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "orange")) - .set_result("Color", "orange", "Other", "orange") - .visit(color_other) - .send_msg("I don't know that one, try again please.", self.channel) - .visit(color_split) - .wait() - .save() - .resume(msg=self.create_incoming_msg(contact, "green")) - .set_result("Color", "green", "Green", "green") - .visit(beer_prompt) - .send_msg("Good choice, I like Green too! What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "skol")) - .set_result("Beer", "skol", "Skol", "skol") - .visit(name_prompt) - .send_msg("Lastly, what is your name?", self.channel) - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "bobby")) - .set_result("Name", "bobby", "All Responses", "bobby") - .complete() - .save() - ) - - counts = favorites.get_category_counts() - - assertCount(counts, "color", "Blue", 10) - assertCount(counts, "color", "Red", 5) - assertCount(counts, "beer", "Primus", 15) - - # name shouldn't be included since it's open ended - self.assertNotIn('"name": "Name"', json.dumps(counts)) - - # five oranges went back and became greens - assertCount(counts, "color", "Other", 0) - assertCount(counts, "color", "Green", 5) - - # now remap the uuid for our color node - flow_json = favorites.get_definition() - flow_json = json.loads(json.dumps(flow_json).replace(color_split["uuid"], str(uuid4()))) - flow_nodes = flow_json["nodes"] - color_prompt = flow_nodes[0] - color_other = flow_nodes[1] - color_split = flow_nodes[2] - - favorites.save_revision(self.admin, flow_json) - - # send a few more runs through our updated flow - for i in range(0, 3): - contact = self.create_contact("Contact %d" % i, phone="+120655533%d" % i) - ( - MockSessionWriter(contact, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "red")) - .set_result("Color", "red", "Red", "red") - .visit(beer_prompt) - .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(contact, "turbo")) - .set_result("Beer", "turbo", "Turbo King", "turbo") - .visit(name_prompt) - .wait() - .save() - ) - - # should now have three more reds - counts = favorites.get_category_counts() - assertCount(counts, "color", "Red", 8) - assertCount(counts, "beer", "Turbo King", 3) - - # now delete the color split and repoint nodes to the beer split - flow_json["nodes"].pop(2) - for node in flow_json["nodes"]: - for exit in node["exits"]: - if exit.get("destination_uuid") == color_split["uuid"]: - exit["destination_uuid"] = beer_split["uuid"] - - favorites.save_revision(self.admin, flow_json) - - # now the color counts have been removed, but beer is still there - counts = favorites.get_category_counts() - self.assertEqual(["beer"], [c["key"] for c in counts]) - assertCount(counts, "beer", "Turbo King", 3) - - # make sure it still works after ze squashings - self.assertEqual(76, FlowCategoryCount.objects.all().count()) - FlowCategoryCount.squash() - self.assertEqual(9, FlowCategoryCount.objects.all().count()) - counts = favorites.get_category_counts() - assertCount(counts, "beer", "Turbo King", 3) - - # test tostring - str(FlowCategoryCount.objects.all().first()) - - # and if we delete our runs, things zero out - for run in FlowRun.objects.all(): - run.delete() - - counts = favorites.get_category_counts() - assertCount(counts, "beer", "Turbo King", 0) - - def test_category_counts_with_null_categories(self): - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[4] - - msg = self.create_incoming_msg(self.contact, "blue") - run = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=msg) - .set_result("Color", "blue", "Blue", "blue") - .complete() - .save() - ).session.runs.get() - - FlowCategoryCount.objects.get(category_name="Blue", result_name="Color", result_key="color", count=1) - - # get our run and clear the category - run = FlowRun.objects.get(flow=flow, contact=self.contact) - results = run.results - del results["color"]["category"] - results["color"]["created_on"] = timezone.now() - run.save(update_fields=["results", "modified_on"]) - - # should have added a negative one now - self.assertEqual(2, FlowCategoryCount.objects.filter(category_name="Blue", result_name="Color").count()) - FlowCategoryCount.objects.get(category_name="Blue", result_name="Color", result_key="color", count=-1) - - def test_start_counts(self): - # create start for 10 contacts - flow = self.create_flow("Test") - start = FlowStart.objects.create(org=self.org, flow=flow, created_by=self.admin) - for i in range(10): - start.contacts.add(self.create_contact("Bob", urns=[f"twitter:bobby{i}"])) - - # create runs for first 5 - for c in start.contacts.order_by("id")[:5]: - MockSessionWriter(contact=c, flow=flow, start=start).wait().save() - - # check our count - self.assertEqual(FlowStartCount.get_count(start), 5) - - # create runs for last 5 - for c in start.contacts.order_by("id")[5:]: - MockSessionWriter(contact=c, flow=flow, start=start).wait().save() - - # check our count - self.assertEqual(FlowStartCount.get_count(start), 10) - - # squash them - FlowStartCount.squash() - self.assertEqual(FlowStartCount.get_count(start), 10) - - def test_flow_keyword_update(self): - self.login(self.admin) - flow = 
Flow.create(self.org, self.admin, "Flow") - flow.flow_type = Flow.TYPE_SURVEY - flow.save() - - # keywords aren't an option for survey flows - response = self.client.get(reverse("flows.flow_update", args=[flow.pk])) - self.assertNotIn("keyword_triggers", response.context["form"].fields) - self.assertNotIn("ignore_triggers", response.context["form"].fields) - - # send update with triggers and ignore flag anyways - post_data = dict() - post_data["name"] = "Flow With Keyword Triggers" - post_data["keyword_triggers"] = "notallowed" - post_data["ignore_keywords"] = True - post_data["expires_after_minutes"] = 60 * 12 - response = self.client.post(reverse("flows.flow_update", args=[flow.pk]), post_data, follow=True) - - # still shouldn't have any triggers - flow.refresh_from_db() - self.assertFalse(flow.ignore_triggers) - self.assertEqual(0, flow.triggers.all().count()) - - def test_flow_update_of_inactive_flow(self): - flow = self.get_flow("favorites") - flow.release(self.admin) - - post_data = {"name": "Flow that does not exist"} - - self.login(self.admin) - response = self.client.post(reverse("flows.flow_update", args=[flow.pk]), post_data) - - # can't delete already released flow - self.assertEqual(response.status_code, 404) - - def test_flow_results_of_inactive_flow(self): - flow = self.get_flow("favorites") - flow.release(self.admin) - - self.login(self.admin) - response = self.client.get(reverse("flows.flow_results", args=[flow.uuid])) - - self.assertEqual(response.status_code, 404) - - def test_flow_results_with_hidden_results(self): - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_split = flow_nodes[4] - - # add a spec for a hidden result to this flow.. which should not be included below - flow.metadata[Flow.METADATA_RESULTS].append( - { - "key": "_color_classification", - "name": "_Color Classification", - "categories": ["Success", "Skipped", "Failure"], - "node_uuids": [color_split["uuid"]], - } - ) - - self.login(self.admin) - response = self.client.get(reverse("flows.flow_results", args=[flow.uuid])) - - self.assertEqual(response.status_code, 200) - self.assertEqual( - response.context["result_fields"], - [ - { - "key": "color", - "name": "Color", - "categories": ["Orange", "Blue", "Other", "Nothing"], - "node_uuids": [color_split["uuid"]], - "has_categories": "true", - } - ], - ) - - def test_legacy_validate_definition(self): - with self.assertRaises(ValueError): - FlowRevision.validate_legacy_definition({"flow_type": "U", "nodes": []}) - - with self.assertRaises(ValueError): - FlowRevision.validate_legacy_definition(self.get_flow_json("not_fully_localized")) - - # base_language of null, but spec version 8 - with self.assertRaises(ValueError): - FlowRevision.validate_legacy_definition(self.get_flow_json("no_base_language_v8")) - - # base_language of 'eng' but non localized actions - with self.assertRaises(ValueError): - FlowRevision.validate_legacy_definition(self.get_flow_json("non_localized_with_language")) - - with self.assertRaises(ValueError): - FlowRevision.validate_legacy_definition(self.get_flow_json("non_localized_ruleset")) - - def test_importing_dependencies(self): - # create channel to be matched by name - channel = self.create_channel("TG", "RapidPro Test", "12345324635") - - flow = self.get_flow("dependencies_v13") - flow_def = flow.get_definition() - - # global should have been created with blank value - self.assertTrue(self.org.globals.filter(name="Org Name", key="org_name", value="").exists()) - - # topic should have been 
created too - self.assertTrue(self.org.topics.filter(name="Support").exists()) - - # fields created with type if exists in export - self.assertTrue(self.org.fields.filter(key="cat_breed", name="Cat Breed", value_type="T").exists()) - self.assertTrue(self.org.fields.filter(key="french_age", value_type="N").exists()) - - # reference to channel changed to match existing channel by name - self.assertEqual( - {"uuid": str(channel.uuid), "name": "RapidPro Test"}, flow_def["nodes"][0]["actions"][4]["channel"] - ) - - # reference to classifier unchanged since it doesn't exist - self.assertEqual( - {"uuid": "891a1c5d-1140-4fd0-bd0d-a919ea25abb6", "name": "Feelings"}, - flow_def["nodes"][7]["actions"][0]["classifier"], - ) - - def test_flow_metadata(self): - # test importing both old and new flow formats - for flow_file in ("favorites", "favorites_v13"): - flow = self.get_flow(flow_file) - - self.assertEqual( - flow.metadata["results"], - [ - { - "key": "color", - "name": "Color", - "categories": ["Red", "Green", "Blue", "Cyan", "Other"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "beer", - "name": "Beer", - "categories": ["Mutzig", "Primus", "Turbo King", "Skol", "Other"], - "node_uuids": [matchers.UUID4String()], - }, - { - "key": "name", - "name": "Name", - "categories": ["All Responses"], - "node_uuids": [matchers.UUID4String()], - }, - ], - ) - self.assertEqual(len(flow.metadata["waiting_exit_uuids"]), 11) - self.assertEqual(len(flow.metadata["parent_refs"]), 0) - - def test_group_send(self): - # create an inactive group with the same name, to test that this doesn't blow up our import - group = ContactGroup.get_or_create(self.org, self.admin, "Survey Audience") - group.release(self.admin) - - # and create another as well - ContactGroup.get_or_create(self.org, self.admin, "Survey Audience") - - # fetching a flow with a group send shouldn't throw - self.get_flow("group_send_flow") - - def test_flow_delete_of_inactive_flow(self): - flow = self.get_flow("favorites") - flow.release(self.admin) - - self.login(self.admin) - response = self.client.post(reverse("flows.flow_delete", args=[flow.pk])) - - # can't delete already released flow - self.assertEqual(response.status_code, 404) - - def test_delete(self): - flow = self.get_flow("favorites_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - - # create a campaign that contains this flow - friends = self.create_group("Friends", []) - poll_date = self.create_field("poll_date", "Poll Date", value_type=ContactField.TYPE_DATETIME) - - campaign = Campaign.create(self.org, self.admin, Campaign.get_unique_name(self.org, "Favorite Poll"), friends) - event1 = CampaignEvent.create_flow_event( - self.org, self.admin, campaign, poll_date, offset=0, unit="D", flow=flow, delivery_hour="13" - ) - - # create a trigger that contains this flow - trigger = Trigger.create( - self.org, self.admin, Trigger.TYPE_KEYWORD, flow, keywords=["poll"], match_type=Trigger.MATCH_FIRST_WORD - ) - - # run the flow - ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "RED")) - .visit(beer_prompt) - .visit(beer_split) - .wait() - .save() - ) - - # run it again to completion - joe = self.create_contact("Joe", phone="1234") - ( - MockSessionWriter(joe, flow) - .visit(color_prompt) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(joe, 
"green")) - .visit(beer_prompt) - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(joe, "primus")) - .complete() - .save() - ) - - # try to remove the flow, not logged in, no dice - response = self.client.post(reverse("flows.flow_delete", args=[flow.uuid])) - self.assertLoginRedirect(response) - - # login as admin - self.login(self.admin) - response = self.client.post(reverse("flows.flow_delete", args=[flow.uuid])) - self.assertEqual(200, response.status_code) - - # flow should no longer be active - flow.refresh_from_db() - self.assertFalse(flow.is_active) - - # runs should not be deleted - self.assertEqual(flow.runs.count(), 2) - - # our campaign event and trigger should no longer be active - event1.refresh_from_db() - self.assertFalse(event1.is_active) - - trigger.refresh_from_db() - self.assertFalse(trigger.is_active) - - def test_delete_with_dependencies(self): - self.login(self.admin) - - self.get_flow("dependencies") - self.get_flow("dependencies_voice") - parent = Flow.objects.filter(name="Dependencies").first() - child = Flow.objects.filter(name="Child Flow").first() - voice = Flow.objects.filter(name="Voice Dependencies").first() - - contact_fields = ( - {"key": "contact_age", "name": "Contact Age"}, - # fields based on parent and child references - {"key": "top"}, - {"key": "bottom"}, - # replies - {"key": "chw"}, - # url attachemnts - {"key": "attachment"}, - # dynamic groups - {"key": "cat_breed", "name": "Cat Breed"}, - {"key": "organization"}, - # sending messages - {"key": "recipient"}, - {"key": "message"}, - # sending emails - {"key": "email_message", "name": "Email Message"}, - {"key": "subject"}, - # trigger someone else - {"key": "other_phone", "name": "Other Phone"}, - # rules and localizations - {"key": "rule"}, - {"key": "french_rule", "name": "French Rule"}, - {"key": "french_age", "name": "French Age"}, - {"key": "french_fries", "name": "French Fries"}, - # updating contacts - {"key": "favorite_cat", "name": "Favorite Cat"}, - {"key": "next_cat_fact", "name": "Next Cat Fact"}, - {"key": "last_cat_fact", "name": "Last Cat Fact"}, - # webhook urls - {"key": "webhook"}, - # expression splits - {"key": "expression_split", "name": "Expression Split"}, - # voice says - {"key": "play_message", "name": "Play Message", "flow": voice}, - {"key": "voice_rule", "name": "Voice Rule", "flow": voice}, - # voice plays (recordings) - {"key": "voice_recording", "name": "Voice Recording", "flow": voice}, - ) - - for field_spec in contact_fields: - key = field_spec.get("key") - name = field_spec.get("name", key.capitalize()) - flow = field_spec.get("flow", parent) - - # make sure our field exists after import - field = self.org.fields.filter(key=key, name=name, is_system=False, is_proxy=False).first() - self.assertIsNotNone(field, "Couldn't find field %s (%s)" % (key, name)) - - # and our flow is dependent on us - self.assertIsNotNone( - flow.field_dependencies.filter(key__in=[key]).first(), - "Flow is missing dependency on %s (%s)" % (key, name), - ) - - # we can delete our child flow and the parent ('Dependencies') will be marked as having issues - self.client.post(reverse("flows.flow_delete", args=[child.uuid])) - - parent = Flow.objects.filter(name="Dependencies").get() - child.refresh_from_db() - - self.assertFalse(child.is_active) - self.assertTrue(parent.has_issues) - self.assertNotIn(child, parent.flow_dependencies.all()) - - # deleting our parent flow should also work - self.client.post(reverse("flows.flow_delete", args=[parent.uuid])) - - 
parent.refresh_from_db() - self.assertFalse(parent.is_active) - self.assertEqual(0, parent.field_dependencies.all().count()) - self.assertEqual(0, parent.flow_dependencies.all().count()) - self.assertEqual(0, parent.group_dependencies.all().count()) - - def test_update_expiration_task(self): - flow1 = self.create_flow("Test 1") - flow2 = self.create_flow("Test 2") - - # create waiting session and run for flow 1 - session1 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=self.contact, - current_flow=flow1, - status=FlowSession.STATUS_WAITING, - output_url="http://sessions.com/123.json", - wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), - wait_expires_on=datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), - wait_resume_on_expire=False, - ) - - # create non-waiting session for flow 1 - session2 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=self.contact, - current_flow=flow1, - status=FlowSession.STATUS_COMPLETED, - output_url="http://sessions.com/234.json", - wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), - wait_expires_on=None, - wait_resume_on_expire=False, - ended_on=timezone.now(), - ) - - # create waiting session for flow 2 - session3 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=self.contact, - current_flow=flow2, - status=FlowSession.STATUS_WAITING, - output_url="http://sessions.com/345.json", - wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), - wait_expires_on=datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), - wait_resume_on_expire=False, - ) - - # update flow 1 expires to 2 hours - flow1.expires_after_minutes = 120 - flow1.save(update_fields=("expires_after_minutes",)) - - update_session_wait_expires(flow1.id) - - # new session expiration should be wait_started_on + 1 hour - session1.refresh_from_db() - self.assertEqual(datetime(2022, 1, 1, 2, 0, 0, 0, tzone.utc), session1.wait_expires_on) - - # other sessions should be unchanged - session2.refresh_from_db() - session3.refresh_from_db() - self.assertIsNone(session2.wait_expires_on) - self.assertEqual(datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), session3.wait_expires_on) - - -class FlowCRUDLTest(TembaTest, CRUDLTestMixin): - def test_menu(self): - menu_url = reverse("flows.flow_menu") - - FlowLabel.create(self.org, self.admin, "Important") - - self.assertRequestDisallowed(menu_url, [None, self.agent]) - self.assertPageMenu( - menu_url, - self.admin, - [ - "Active", - "Archived", - "Globals", - ("History", ["Webhooks", "Flow Starts"]), - ("Labels", ["Important (0)"]), - ], - ) - - def test_create(self): - create_url = reverse("flows.flow_create") - self.create_flow("Registration") - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - response = self.assertCreateFetch( - create_url, - [self.editor, self.admin], - form_fields=["name", "keyword_triggers", "flow_type", "base_language"], - ) - - # check flow type options - self.assertEqual( - [ - (Flow.TYPE_MESSAGE, "Messaging"), - (Flow.TYPE_VOICE, "Phone Call"), - (Flow.TYPE_BACKGROUND, "Background"), - ], - response.context["form"].fields["flow_type"].choices, - ) - - # try to submit without name or language - self.assertCreateSubmit( - create_url, - self.admin, - {"flow_type": "M"}, - form_errors={"name": "This field is required.", "base_language": "This field is required."}, - ) - - # try to submit with a name that contains disallowed characters - self.assertCreateSubmit( - create_url, - self.admin, - {"name": '"Registration"', "flow_type": "M", 
"base_language": "eng"}, - form_errors={"name": 'Cannot contain the character: "'}, - ) - - # try to submit with a name that is too long - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "X" * 65, "flow_type": "M", "base_language": "eng"}, - form_errors={"name": "Ensure this value has at most 64 characters (it has 65)."}, - ) - - # try to submit with a name that is already used - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Registration", "flow_type": "M", "base_language": "eng"}, - form_errors={"name": "Already used by another flow."}, - ) - - response = self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Flow 1", "flow_type": "M", "base_language": "eng"}, - new_obj_query=Flow.objects.filter(org=self.org, flow_type="M", name="Flow 1"), - ) - - flow1 = Flow.objects.get(name="Flow 1") - self.assertEqual(1, flow1.revisions.all().count()) - - self.assertRedirect(response, reverse("flows.flow_editor", args=[flow1.uuid])) - - def test_create_with_keywords(self): - create_url = reverse("flows.flow_create") - - # try creating a flow with invalid keywords - self.assertCreateSubmit( - create_url, - self.admin, - { - "name": "Flow #1", - "base_language": "eng", - "keyword_triggers": ["toooooooooooooolong", "test"], - "flow_type": Flow.TYPE_MESSAGE, - }, - form_errors={ - "keyword_triggers": "Must be single words, less than 16 characters, containing only letters and numbers." - }, - ) - - # submit with valid keywords - self.assertCreateSubmit( - create_url, - self.admin, - { - "name": "Flow 1", - "base_language": "eng", - "keyword_triggers": ["testing", "test"], - "flow_type": Flow.TYPE_MESSAGE, - }, - new_obj_query=Flow.objects.filter(org=self.org, name="Flow 1", flow_type="M"), - ) - - # check the created keyword trigger - flow1 = Flow.objects.get(name="Flow 1") - self.assertEqual(1, flow1.triggers.count()) - self.assertEqual(1, flow1.triggers.filter(trigger_type="K", keywords=["testing", "test"]).count()) - - # try to create another flow with one of the same keywords - self.assertCreateSubmit( - create_url, - self.admin, - { - "name": "Flow 2", - "base_language": "eng", - "keyword_triggers": ["test"], - "flow_type": Flow.TYPE_MESSAGE, - }, - form_errors={"keyword_triggers": '"test" is already used for another flow.'}, - ) - - # add a group to the existing trigger - group = self.create_group("Testers", contacts=[]) - flow1.triggers.get().groups.add(group) - - # and now it's no longer a conflict - self.assertCreateSubmit( - create_url, - self.admin, - { - "name": "Flow 2", - "base_language": "eng", - "keyword_triggers": ["test"], - "flow_type": Flow.TYPE_MESSAGE, - }, - new_obj_query=Flow.objects.filter(org=self.org, name="Flow 2", flow_type="M"), - ) - - # check the created keyword triggers - flow2 = Flow.objects.get(name="Flow 2") - self.assertEqual([["test"]], list(flow2.triggers.order_by("id").values_list("keywords", flat=True))) - - def test_views(self): - create_url = reverse("flows.flow_create") - - self.create_contact("Eric", phone="+250788382382") - flow = self.get_flow("color") - - # create a flow for another org - other_flow = Flow.create(self.org2, self.admin2, "Flow2") - - # no login, no list - response = self.client.get(reverse("flows.flow_list")) - self.assertRedirect(response, reverse("users.user_login")) - - user = self.admin - user.first_name = "Test" - user.last_name = "Contact" - user.save() - self.login(user) - - self.assertContentMenu(reverse("flows.flow_list"), self.user, ["Export"]) - - self.assertContentMenu( - 
reverse("flows.flow_list"), - self.admin, - ["New Flow", "New Label", "Import", "Export"], - ) - - # list, should have only one flow (the one created in setUp) - response = self.client.get(reverse("flows.flow_list")) - self.assertEqual(1, len(response.context["object_list"])) - - # inactive list shouldn't have any flows - response = self.client.get(reverse("flows.flow_archived")) - self.assertEqual(0, len(response.context["object_list"])) - - # also shouldn't be able to view other flow - response = self.client.get(reverse("flows.flow_editor", args=[other_flow.uuid])) - self.assertEqual(302, response.status_code) - - # get our create page - response = self.client.get(create_url) - self.assertTrue(response.context["has_flows"]) - - # create a new regular flow - response = self.client.post( - create_url, {"name": "Flow 1", "flow_type": Flow.TYPE_MESSAGE, "base_language": "eng"} - ) - self.assertEqual(302, response.status_code) - - # check we've been redirected to the editor and we have a revision - flow1 = Flow.objects.get(org=self.org, name="Flow 1") - self.assertEqual(f"/flow/editor/{flow1.uuid}/", response.url) - self.assertEqual(1, flow1.revisions.all().count()) - self.assertEqual(Flow.TYPE_MESSAGE, flow1.flow_type) - self.assertEqual(10080, flow1.expires_after_minutes) - - # add a trigger on this flow - trigger = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - flow1, - keywords=["unique"], - match_type=Trigger.MATCH_FIRST_WORD, - ) - - # create a new voice flow - response = self.client.post( - create_url, {"name": "Voice Flow", "flow_type": Flow.TYPE_VOICE, "base_language": "eng"} - ) - voice_flow = Flow.objects.get(org=self.org, name="Voice Flow") - self.assertEqual(response.status_code, 302) - self.assertEqual(voice_flow.flow_type, "V") - - # default expiration for voice is shorter - self.assertEqual(voice_flow.expires_after_minutes, 5) - - # test flows with triggers - # create a new flow with one unformatted keyword - response = self.client.post( - create_url, - { - "name": "Flow With Unformated Keyword Triggers", - "keyword_triggers": ["this is", "it"], - "base_language": "eng", - }, - ) - self.assertFormError( - response.context["form"], - "keyword_triggers", - "Must be single words, less than 16 characters, containing only letters and numbers.", - ) - - # create a new flow with one existing keyword - response = self.client.post( - create_url, {"name": "Flow With Existing Keyword Triggers", "keyword_triggers": ["this", "is", "unique"]} - ) - self.assertFormError(response.context["form"], "keyword_triggers", '"unique" is already used for another flow.') - - # create another trigger so there are two in the way - trigger = Trigger.create( - self.org, - self.admin, - Trigger.TYPE_KEYWORD, - flow1, - keywords=["this"], - match_type=Trigger.MATCH_FIRST_WORD, - ) - - response = self.client.post( - create_url, {"name": "Flow With Existing Keyword Triggers", "keyword_triggers": ["this", "is", "unique"]} - ) - self.assertFormError( - response.context["form"], "keyword_triggers", '"this", "unique" are already used for another flow.' 
- ) - trigger.delete() - - # create a new flow with keywords - response = self.client.post( - create_url, - { - "name": "Flow With Good Keyword Triggers", - "base_language": "eng", - "keyword_triggers": ["this", "is", "it"], - "flow_type": Flow.TYPE_MESSAGE, - "expires_after_minutes": 30, - }, - ) - flow3 = Flow.objects.get(name="Flow With Good Keyword Triggers") - - # check we're being redirected to the editor view - self.assertRedirect(response, reverse("flows.flow_editor", args=[flow3.uuid])) - - # can see results for a flow - response = self.client.get(reverse("flows.flow_results", args=[flow.uuid])) - self.assertEqual(200, response.status_code) - - # check flow listing - response = self.client.get(reverse("flows.flow_list")) - self.assertEqual(list(response.context["object_list"]), [flow3, voice_flow, flow1, flow]) # by saved_on - - # test update view - response = self.client.post(reverse("flows.flow_update", args=[flow.id])) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.context["form"].fields), 5) - self.assertIn("name", response.context["form"].fields) - self.assertIn("keyword_triggers", response.context["form"].fields) - self.assertIn("ignore_triggers", response.context["form"].fields) - - # test ivr flow creation - self.channel.role = "SRCA" - self.channel.save() - - response = self.client.post( - create_url, - { - "name": "Message flow", - "base_language": "eng", - "expires_after_minutes": 5, - "flow_type": Flow.TYPE_MESSAGE, - }, - ) - msg_flow = Flow.objects.get(name="Message flow") - - self.assertEqual(302, response.status_code) - self.assertEqual(msg_flow.flow_type, Flow.TYPE_MESSAGE) - - response = self.client.post( - create_url, - {"name": "Call flow", "base_language": "eng", "expires_after_minutes": 5, "flow_type": Flow.TYPE_VOICE}, - ) - call_flow = Flow.objects.get(name="Call flow") - - self.assertEqual(302, response.status_code) - self.assertEqual(call_flow.flow_type, Flow.TYPE_VOICE) - - # test creating a flow with base language - self.org.set_flow_languages(self.admin, ["eng"]) - - response = self.client.post( - create_url, - { - "name": "Language Flow", - "expires_after_minutes": 5, - "base_language": "eng", - "flow_type": Flow.TYPE_MESSAGE, - }, - ) - - language_flow = Flow.objects.get(name="Language Flow") - - self.assertEqual(302, response.status_code) - self.assertEqual(language_flow.base_language, "eng") - - def test_update_messaging_flow(self): - flow = self.get_flow("color_v13") - update_url = reverse("flows.flow_update", args=[flow.id]) - - def assert_triggers(expected: list): - actual = list(flow.triggers.filter(trigger_type="K", is_active=True).values("keywords", "is_archived")) - self.assertCountEqual(actual, expected) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields={ - "name": "Colors", - "keyword_triggers": [], - "expires_after_minutes": 720, - "ignore_triggers": False, - }, - ) - - # try to update with empty name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "", "expires_after_minutes": 10, "ignore_triggers": True}, - form_errors={"name": "This field is required."}, - object_unchanged=flow, - ) - - # update all fields - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "New Name", - "keyword_triggers": ["test", "help"], - "expires_after_minutes": 10, - "ignore_triggers": True, - }, - ) - - flow.refresh_from_db() - self.assertEqual("New Name", flow.name) - 
self.assertEqual(10, flow.expires_after_minutes) - self.assertTrue(flow.ignore_triggers) - - assert_triggers([{"keywords": ["test", "help"], "is_archived": False}]) - - # remove one keyword and add another - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "New Name", - "keyword_triggers": ["help", "support"], - "expires_after_minutes": 10, - "ignore_triggers": True, - }, - ) - - assert_triggers( - [ - {"keywords": ["test", "help"], "is_archived": True}, - {"keywords": ["help", "support"], "is_archived": False}, - ] - ) - - # put "test" keyword back and remove "support" - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "New Name", - "keyword_triggers": ["test", "help"], - "expires_after_minutes": 10, - "ignore_triggers": True, - }, - ) - - assert_triggers( - [ - {"keywords": ["test", "help"], "is_archived": False}, - {"keywords": ["help", "support"], "is_archived": True}, - ] - ) - - # add channel filter to active trigger - support = flow.triggers.get(is_archived=False) - support.channel = self.channel - support.save(update_fields=("channel",)) - - # re-adding "support" will now restore that trigger - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "New Name", - "keyword_triggers": ["test", "help", "support"], - "expires_after_minutes": 10, - "ignore_triggers": True, - }, - ) - - assert_triggers( - [ - {"keywords": ["test", "help"], "is_archived": False}, - {"keywords": ["help", "support"], "is_archived": False}, - ] - ) - - def test_update_voice_flow(self): - flow = self.get_flow("ivr") - update_url = reverse("flows.flow_update", args=[flow.id]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch( - update_url, - [self.editor, self.admin], - form_fields=["name", "keyword_triggers", "expires_after_minutes", "ignore_triggers", "ivr_retry"], - ) - - # try to update with an expires value which is only for messaging flows and an invalid retry value - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "New Name", "expires_after_minutes": 720, "ignore_triggers": True, "ivr_retry": 1234}, - form_errors={ - "expires_after_minutes": "Select a valid choice. 720 is not one of the available choices.", - "ivr_retry": "Select a valid choice. 
1234 is not one of the available choices.", - }, - object_unchanged=flow, - ) - - # update name and contact creation option to be per login - self.assertUpdateSubmit( - update_url, - self.admin, - { - "name": "New Name", - "keyword_triggers": ["test", "help"], - "expires_after_minutes": 10, - "ignore_triggers": True, - "ivr_retry": 30, - }, - ) - - flow.refresh_from_db() - self.assertEqual("New Name", flow.name) - self.assertEqual(10, flow.expires_after_minutes) - self.assertTrue(flow.ignore_triggers) - self.assertEqual(30, flow.metadata.get("ivr_retry")) - self.assertEqual(1, flow.triggers.count()) - self.assertEqual(1, flow.triggers.filter(keywords=["test", "help"]).count()) - - # check we still have that value after saving a new revision - flow.save_revision(self.admin, flow.get_definition()) - self.assertEqual(30, flow.metadata["ivr_retry"]) - - def test_update_surveyor_flow(self): - flow = self.get_flow("media_survey") - update_url = reverse("flows.flow_update", args=[flow.id]) - - # we should only see name and contact creation option on form - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=["name", "contact_creation"]) - - # update name and contact creation option to be per login - self.assertUpdateSubmit(update_url, self.admin, {"name": "New Name", "contact_creation": "login"}) - - flow.refresh_from_db() - self.assertEqual("New Name", flow.name) - self.assertEqual("login", flow.metadata.get("contact_creation")) - - def test_update_background_flow(self): - flow = self.get_flow("background") - update_url = reverse("flows.flow_update", args=[flow.id]) - - # we should only see name on form - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=["name"]) - - # update name and contact creation option to be per login - self.assertUpdateSubmit(update_url, self.admin, {"name": "New Name"}) - - flow.refresh_from_db() - self.assertEqual("New Name", flow.name) - - def test_list_views(self): - flow1 = self.get_flow("color_v13") - flow2 = self.get_flow("no_ruleset_flow") - - # archive second flow - flow2.is_archived = True - flow2.save(update_fields=("is_archived",)) - - flow3 = Flow.create(self.org, self.admin, "Flow 3") - - self.login(self.admin) - - # see our trigger on the list page - response = self.client.get(reverse("flows.flow_list")) - self.assertContains(response, flow1.name) - self.assertContains(response, flow3.name) - self.assertEqual(2, response.context["folders"][0]["count"]) - self.assertEqual(1, response.context["folders"][1]["count"]) - - # archive it - response = self.client.post(reverse("flows.flow_list"), {"action": "archive", "objects": flow1.id}) - self.assertEqual(200, response.status_code) - - # flow should no longer appear in list - response = self.client.get(reverse("flows.flow_list")) - self.assertNotContains(response, flow1.name) - self.assertContains(response, flow3.name) - self.assertEqual(1, response.context["folders"][0]["count"]) - self.assertEqual(2, response.context["folders"][1]["count"]) - - self.assertEqual(("archive", "label", "export-results"), response.context["actions"]) - - # but does appear in archived list - response = self.client.get(reverse("flows.flow_archived")) - self.assertContains(response, flow1.name) - - # flow2 should appear before flow since it was created later - self.assertTrue(flow2, response.context["object_list"][0]) 
- self.assertTrue(flow1, response.context["object_list"][1]) - - # unarchive it - response = self.client.post(reverse("flows.flow_archived"), {"action": "restore", "objects": flow1.id}) - self.assertEqual(200, response.status_code) - - # flow should no longer appear in archived list - response = self.client.get(reverse("flows.flow_archived")) - self.assertNotContains(response, flow1.name) - self.assertEqual(("restore",), response.context["actions"]) - - # but does appear in normal list - response = self.client.get(reverse("flows.flow_list")) - self.assertContains(response, flow1.name) - self.assertContains(response, flow3.name) - self.assertEqual(2, response.context["folders"][0]["count"]) - self.assertEqual(1, response.context["folders"][1]["count"]) - - # can label flows - label1 = FlowLabel.create(self.org, self.admin, "Important") - - response = self.client.post( - reverse("flows.flow_list"), {"action": "label", "objects": flow1.id, "label": label1.id} - ) - - self.assertEqual(200, response.status_code) - self.assertEqual({label1}, set(flow1.labels.all())) - self.assertEqual({flow1}, set(label1.flows.all())) - - # and unlabel - response = self.client.post( - reverse("flows.flow_list"), {"action": "label", "objects": flow1.id, "label": label1.id, "add": False} - ) - - self.assertEqual(200, response.status_code) - - flow1.refresh_from_db() - self.assertEqual(set(), set(flow1.labels.all())) - - # voice flows should be included in the count - Flow.objects.filter(id=flow1.id).update(flow_type=Flow.TYPE_VOICE) - - response = self.client.get(reverse("flows.flow_list")) - self.assertContains(response, flow1.name) - self.assertEqual(2, response.context["folders"][0]["count"]) - self.assertEqual(1, response.context["folders"][1]["count"]) - - # single message flow (flom campaign) should not be included in counts and not even on this list - Flow.objects.filter(id=flow1.id).update(is_system=True) - - response = self.client.get(reverse("flows.flow_list")) - - self.assertNotContains(response, flow1.name) - self.assertEqual(1, response.context["folders"][0]["count"]) - self.assertEqual(1, response.context["folders"][1]["count"]) - - # single message flow should not be even in the archived list - Flow.objects.filter(id=flow1.id).update(is_system=True, is_archived=True) - - response = self.client.get(reverse("flows.flow_archived")) - self.assertNotContains(response, flow1.name) - self.assertEqual(1, response.context["folders"][0]["count"]) - self.assertEqual(1, response.context["folders"][1]["count"]) # only flow2 - - def test_filter(self): - flow1 = self.create_flow("Flow 1") - flow2 = self.create_flow("Flow 2") - - label1 = FlowLabel.create(self.org, self.admin, "Important") - label2 = FlowLabel.create(self.org, self.admin, "Very Important") - - label1.toggle_label([flow1, flow2], add=True) - label2.toggle_label([flow2], add=True) - - self.login(self.admin) - - response = self.client.get(reverse("flows.flow_filter", args=[label1.uuid])) - self.assertEqual([flow2, flow1], list(response.context["object_list"])) - self.assertEqual(2, len(response.context["labels"])) - self.assertEqual(("label", "export-results"), response.context["actions"]) - - response = self.client.get(reverse("flows.flow_filter", args=[label2.uuid])) - self.assertEqual([flow2], list(response.context["object_list"])) - - response = self.client.get(reverse("flows.flow_filter", args=[label2.uuid])) - self.assertEqual(f"/flow/labels/{label2.uuid}", response.headers.get(TEMBA_MENU_SELECTION)) - - def test_get_definition(self): - flow = 
self.get_flow("color_v13") - - # if definition is outdated, metadata values are updated from db object - flow.name = "Amazing Flow" - flow.save(update_fields=("name",)) - - self.assertEqual("Amazing Flow", flow.get_definition()["name"]) - - # make a flow that looks like a legacy flow - flow = self.get_flow("color_v11") - original_def = self.get_flow_json("color_v11") - - flow.version_number = "11.12" - flow.save(update_fields=("version_number",)) - - revision = flow.revisions.get() - revision.definition = original_def - revision.spec_version = "11.12" - revision.save(update_fields=("definition", "spec_version")) - - self.assertIn("metadata", flow.get_definition()) - - # if definition is outdated, metadata values are updated from db object - flow.name = "Amazing Flow 2" - flow.save(update_fields=("name",)) - - self.assertEqual("Amazing Flow 2", flow.get_definition()["metadata"]["name"]) - - # metadata section can be missing too - del original_def["metadata"] - revision.definition = original_def - revision.save(update_fields=("definition",)) - - self.assertEqual("Amazing Flow 2", flow.get_definition()["metadata"]["name"]) - - def test_fetch_revisions(self): - self.login(self.admin) - - # we should have one revision for an imported flow - flow = self.get_flow("color_v11") - original_def = self.get_flow_json("color_v11") - - # rewind definition to legacy spec - revision = flow.revisions.get() - revision.definition = original_def - revision.spec_version = "11.12" - revision.save(update_fields=("definition", "spec_version")) - - # create a new migrated revision - flow_def = revision.get_migrated_definition() - flow.save_revision(self.admin, flow_def) - - revisions = list(flow.revisions.all().order_by("-created_on")) - - # now we should have two revisions - self.assertEqual(2, len(revisions)) - self.assertEqual(2, revisions[0].revision) - self.assertEqual(Flow.CURRENT_SPEC_VERSION, revisions[0].spec_version) - self.assertEqual(1, revisions[1].revision) - self.assertEqual("11.12", revisions[1].spec_version) - - response = self.client.get(reverse("flows.flow_revisions", args=[flow.uuid])) - self.assertEqual( - [ - { - "user": {"email": "admin@nyaruka.com", "name": "Andy"}, - "created_on": matchers.ISODate(), - "id": revisions[0].id, - "version": Flow.CURRENT_SPEC_VERSION, - "revision": 2, - }, - { - "user": {"email": "admin@nyaruka.com", "name": "Andy"}, - "created_on": matchers.ISODate(), - "id": revisions[1].id, - "version": "11.12", - "revision": 1, - }, - ], - response.json()["results"], - ) - - # now make our legacy revision invalid - definition = original_def.copy() - del definition["base_language"] - revisions[1].definition = definition - revisions[1].save(update_fields=("definition",)) - - # should be back to one valid revision (the non-legacy one) - response = self.client.get(reverse("flows.flow_revisions", args=[flow.uuid])) - self.assertEqual(1, len(response.json()["results"])) - - # fetch that revision - revision_id = response.json()["results"][0]["id"] - response = self.client.get(f"{reverse('flows.flow_revisions', args=[flow.uuid])}{revision_id}/") - - # make sure we can read the definition - definition = response.json()["definition"] - self.assertEqual("und", definition["language"]) - - # really break the legacy revision - revisions[1].definition = {"foo": "bar"} - revisions[1].save(update_fields=("definition",)) - - # should still have only one valid revision - response = self.client.get(reverse("flows.flow_revisions", args=[flow.uuid])) - self.assertEqual(1, 
len(response.json()["results"])) - - # fix the legacy revision - revisions[1].definition = original_def.copy() - revisions[1].save(update_fields=("definition",)) - - # fetch that revision - response = self.client.get(f"{reverse('flows.flow_revisions', args=[flow.uuid])}{revisions[1].id}/") - - # should automatically migrate to latest spec - self.assertEqual(Flow.CURRENT_SPEC_VERSION, response.json()["definition"]["spec_version"]) - - # but we can also limit how far it is migrated - response = self.client.get( - f"{reverse('flows.flow_revisions', args=[flow.uuid])}{revisions[1].id}/?version=13.0.0" - ) - - # should only have been migrated to that version - self.assertEqual("13.0.0", response.json()["definition"]["spec_version"]) - - def test_save_revisions(self): - flow = self.create_flow("Go Flow") - revisions_url = reverse("flows.flow_revisions", args=[flow.uuid]) - - self.login(self.admin) - response = self.client.get(revisions_url) - self.assertEqual(1, len(response.json())) - - definition = flow.revisions.all().first().definition - - # viewers can't save flows - self.login(self.user) - response = self.client.post(revisions_url, definition, content_type="application/json") - self.assertEqual(403, response.status_code) - - # check that we can create a new revision - self.login(self.admin) - response = self.client.post(revisions_url, definition, content_type="application/json") - new_revision = response.json() - self.assertEqual(2, new_revision["revision"][Flow.DEFINITION_REVISION]) - - # but we can't save our old revision - response = self.client.post(revisions_url, definition, content_type="application/json") - self.assertResponseError( - response, "description", "Your changes will not be saved until you refresh your browser" - ) - - # or save an old version - definition = flow.revisions.all().first().definition - definition[Flow.DEFINITION_SPEC_VERSION] = "11.12" - response = self.client.post(revisions_url, definition, content_type="application/json") - self.assertResponseError(response, "description", "Your flow has been upgraded to the latest version") - - def test_inactive_flow(self): - flow = self.get_flow("color_v13") - flow.release(self.admin) - - self.login(self.admin) - - response = self.client.get(reverse("flows.flow_revisions", args=[flow.uuid])) - - self.assertEqual(404, response.status_code) - - response = self.client.get(reverse("flows.flow_activity", args=[flow.uuid])) - - self.assertEqual(404, response.status_code) - - @mock_mailroom - @patch("temba.flows.models.Flow.is_starting") - def test_preview_start(self, mr_mocks, mock_flow_is_starting): - mock_flow_is_starting.return_value = False - - with override_brand(inactive_threshold=1000): - flow = self.create_flow("Test") - self.create_field("age", "Age") - self.create_contact("Ann", phone="+16302222222", fields={"age": 40}) - self.create_contact("Bob", phone="+16303333333", fields={"age": 33}) - - mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) - - preview_url = reverse("flows.flow_preview_start", args=[flow.id]) - - self.login(self.editor) - - response = self.client.post( - preview_url, - { - "query": "age > 30", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - self.assertEqual( - { - "query": 'age > 30 AND status = "active" AND history != "Test Flow"', - "total": 100, - "warnings": [], - "blockers": [], - }, - response.json(), - ) - - # try with a bad query - 
mr_mocks.exception(mailroom.QueryValidationException("mismatched input at (((", "syntax")) - - response = self.client.post( - preview_url, - { - "query": "(((", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - self.assertEqual(400, response.status_code) - self.assertEqual({"query": "", "total": 0, "error": "Invalid query syntax."}, response.json()) - - # suspended orgs should block - self.org.is_suspended = True - self.org.save() - mr_mocks.flow_start_preview(query="age > 30", total=2) - response = self.client.post(preview_url, {"query": "age > 30"}, content_type="application/json") - self.assertEqual( - [ - "Sorry, your workspace is currently suspended. To re-enable starting flows and sending messages, please contact support." - ], - response.json()["blockers"], - ) - - # flagged orgs should block - self.org.is_suspended = False - self.org.is_flagged = True - self.org.save() - mr_mocks.flow_start_preview(query="age > 30", total=2) - response = self.client.post(preview_url, {"query": "age > 30"}, content_type="application/json") - self.assertEqual( - [ - "Sorry, your workspace is currently flagged. To re-enable starting flows and sending messages, please contact support." - ], - response.json()["blockers"], - ) - - self.org.is_flagged = False - self.org.save() - - # trying to start again should fail because there is already a pending start for this flow - mock_flow_is_starting.return_value = True - mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) - - response = self.client.post( - preview_url, - { - "query": "age > 30", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - - self.assertEqual( - [ - "This flow is already being started - please wait until that process completes before starting more contacts." - ], - response.json()["blockers"], - ) - - ivr_flow = self.create_flow("IVR Test", flow_type=Flow.TYPE_VOICE) - - preview_url = reverse("flows.flow_preview_start", args=[ivr_flow.id]) - - # shouldn't be able to since we don't have a call channel - mock_flow_is_starting.return_value = False - mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) - - response = self.client.post( - preview_url, - { - "query": "age > 30", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - - self.assertEqual( - response.json()["blockers"][0], - 'To start this flow you need to add a voice channel to your workspace which will allow you to make and receive calls.', - ) - - # check warning for lots of contacts - preview_url = reverse("flows.flow_preview_start", args=[flow.id]) - mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=10000) - - response = self.client.post( - preview_url, - { - "query": "age > 30", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - - self.assertEqual( - response.json()["warnings"][0], - "You've selected a lot of contacts! Depending on your channel " - "it could take days to reach everybody and could reduce response rates. 
" - "Filter for contacts that have sent a message recently " - "to limit your selection to contacts who are more likely to respond.", - ) - - # if we release our send channel we also can't start a regular messaging flow - self.channel.release(self.admin) - mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) - - response = self.client.post( - preview_url, - { - "query": "age > 30", - "exclusions": {"non_active": True, "started_previously": True}, - }, - content_type="application/json", - ) - - self.assertEqual( - response.json()["blockers"][0], - 'To start this flow you need to add a channel to your workspace which will allow you to send messages to your contacts.', - ) - - @mock_mailroom - def test_template_warnings(self, mr_mocks): - self.login(self.admin) - flow = self.get_flow("whatsapp_template") - - # bring up broadcast dialog - self.login(self.admin) - - mr_mocks.flow_start_preview(query="age > 30", total=2) - response = self.client.post( - reverse("flows.flow_preview_start", args=[flow.id]), - { - "query": "age > 30", - }, - content_type="application/json", - ) - - # no warning, we don't have a whatsapp channel - self.assertEqual(response.json()["warnings"], []) - - # change our channel to use a whatsapp scheme - self.channel.schemes = [URN.WHATSAPP_SCHEME] - self.channel.channel_type = "TWA" - self.channel.save() - - mr_mocks.flow_start_preview(query="age > 30", total=2) - response = self.client.post( - reverse("flows.flow_preview_start", args=[flow.id]), - { - "query": "age > 30", - }, - content_type="application/json", - ) - - # no warning, we don't have a whatsapp channel that requires a message template - self.assertEqual(response.json()["warnings"], []) - - self.channel.channel_type = "WA" - self.channel.save() - - # clear dependencies, this will cause our flow to look like it isn't using templates - metadata = flow.metadata - flow.metadata = {} - flow.save(update_fields=["metadata"]) - - mr_mocks.flow_start_preview(query="age > 30", total=2) - response = self.client.post( - reverse("flows.flow_preview_start", args=[flow.id]), - { - "query": "age > 30", - }, - content_type="application/json", - ) - - self.assertEqual( - response.json()["warnings"], - [ - "This flow does not use message templates. You may still start this flow but WhatsApp contacts who have not sent an incoming message in the last 24 hours may not receive it." 
-            ],
-        )
-
-        # restore our dependency
-        flow.metadata = metadata
-        flow.save(update_fields=["metadata"])
-
-        # template doesn't exist, will be warned
-        mr_mocks.flow_start_preview(query="age > 30", total=2)
-        response = self.client.post(
-            reverse("flows.flow_preview_start", args=[flow.id]),
-            {
-                "query": "age > 30",
-            },
-            content_type="application/json",
-        )
-
-        self.assertEqual(
-            response.json()["warnings"],
-            ["The message template affirmation does not exist on your account and cannot be sent."],
-        )
-
-        # create the template, but no translations
-        template = self.create_template("affirmation", [], uuid="f712e05c-bbed-40f1-b3d9-671bb9b60775")
-
-        # will be warned again
-        mr_mocks.flow_start_preview(query="age > 30", total=2)
-        response = self.client.post(
-            reverse("flows.flow_preview_start", args=[flow.id]),
-            {
-                "query": "age > 30",
-            },
-            content_type="application/json",
-        )
-
-        self.assertEqual(
-            response.json()["warnings"], ["Your message template affirmation is not approved and cannot be sent."]
-        )
-
-        # create a translation, but not approved
-        TemplateTranslation.objects.create(
-            template=template,
-            channel=self.channel,
-            locale="eng-US",
-            status=TemplateTranslation.STATUS_REJECTED,
-            external_id="id1",
-            external_locale="en_US",
-            namespace="foo_namespace",
-            components=[{"name": "body", "type": "body/text", "content": "Hello", "variables": {}, "params": []}],
-            variables=[],
-        )
-
-        # will be warned again
-        mr_mocks.flow_start_preview(query="age > 30", total=2)
-        response = self.client.post(
-            reverse("flows.flow_preview_start", args=[flow.id]),
-            {
-                "query": "age > 30",
-            },
-            content_type="application/json",
-        )
-
-        self.assertEqual(
-            response.json()["warnings"], ["Your message template affirmation is not approved and cannot be sent."]
-        )
-
-        # finally, set our translation to approved
-        TemplateTranslation.objects.update(status=TemplateTranslation.STATUS_APPROVED)
-
-        # no warnings
-        mr_mocks.flow_start_preview(query="age > 30", total=2)
-        response = self.client.post(
-            reverse("flows.flow_preview_start", args=[flow.id]),
-            {
-                "query": "age > 30",
-            },
-            content_type="application/json",
-        )
-
-        self.assertEqual(response.json()["warnings"], [])
-
-    @mock_mailroom
-    def test_start(self, mr_mocks):
-        contact = self.create_contact("Bob", phone="+593979099111")
-        flow = self.create_flow("Test")
-        start_url = f"{reverse('flows.flow_start', args=[])}?flow={flow.id}"
-
-        self.assertRequestDisallowed(start_url, [None, self.user, self.agent])
-        self.assertUpdateFetch(start_url, [self.editor, self.admin], form_fields=["flow", "contact_search"])
-
-        # create flow start with a query
-        mr_mocks.contact_parse_query("frank", cleaned='name ~ "frank"')
-        self.assertUpdateSubmit(
-            start_url,
-            self.admin,
-            {"flow": flow.id, "contact_search": get_contact_search(query="frank")},
-        )
-
-        start = FlowStart.objects.get()
-        self.assertEqual(flow, start.flow)
-        self.assertEqual(FlowStart.STATUS_PENDING, start.status)
-        self.assertEqual({}, start.exclusions)
-        self.assertEqual('name ~ "frank"', start.query)
-
-        self.assertEqual(1, len(mr_mocks.queued_batch_tasks))
-        self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[0]["type"])
-
-        FlowStart.objects.all().delete()
-
-        # create flow start with a bogus query
-        mr_mocks.exception(mailroom.QueryValidationException("query contains an error", "syntax"))
-        self.assertUpdateSubmit(
-            start_url,
-            self.admin,
-            {"flow": flow.id, "contact_search": get_contact_search(query='name = "frank')},
-            form_errors={"contact_search": "Invalid query
syntax."}, - object_unchanged=flow, - ) - - # try missing contacts - self.assertUpdateSubmit( - start_url, - self.admin, - {"flow": flow.id, "contact_search": get_contact_search(contacts=[])}, - form_errors={"contact_search": "Contacts or groups are required."}, - object_unchanged=flow, - ) - - # try to create with an empty query - self.assertUpdateSubmit( - start_url, - self.admin, - {"flow": flow.id, "contact_search": get_contact_search(query="")}, - form_errors={"contact_search": "A contact query is required."}, - object_unchanged=flow, - ) - - query = f"uuid='{contact.uuid}'" - mr_mocks.contact_parse_query(query, cleaned=query) - - # create flow start with exclude_in_other and exclude_reruns both left unchecked - self.assertUpdateSubmit( - start_url, - self.admin, - {"flow": flow.id, "contact_search": get_contact_search(query=query)}, - ) - - start = FlowStart.objects.get() - - self.assertEqual(query, start.query) - self.assertEqual(flow, start.flow) - self.assertEqual(FlowStart.TYPE_MANUAL, start.start_type) - self.assertEqual(FlowStart.STATUS_PENDING, start.status) - self.assertEqual({}, start.exclusions) - - self.assertEqual(2, len(mr_mocks.queued_batch_tasks)) - self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[1]["type"]) - - FlowStart.objects.all().delete() - - @mock_mailroom - def test_broadcast_background_flow(self, mr_mocks): - flow = self.create_flow("Background", flow_type=Flow.TYPE_BACKGROUND) - - # create flow start with a query - mr_mocks.contact_parse_query("frank", cleaned='name ~ "frank"') - - start_url = f"{reverse('flows.flow_start', args=[])}?flow={flow.id}" - self.assertUpdateSubmit( - start_url, self.admin, {"flow": flow.id, "contact_search": get_contact_search(query="frank")} - ) - - start = FlowStart.objects.get() - self.assertEqual(flow, start.flow) - self.assertEqual(FlowStart.STATUS_PENDING, start.status) - self.assertEqual({}, start.exclusions) - self.assertEqual('name ~ "frank"', start.query) - - def test_copy_view(self): - flow = self.get_flow("color") - - self.login(self.admin) - - response = self.client.post(reverse("flows.flow_copy", args=[flow.id])) - - flow_copy = Flow.objects.get(org=self.org, name="Copy of %s" % flow.name) - - self.assertRedirect(response, reverse("flows.flow_editor", args=[flow_copy.uuid])) - - def test_recent_contacts(self): - flow = self.create_flow("Test") - contact1 = self.create_contact("Bob", phone="0979111111") - contact2 = self.create_contact("", phone="0979222222") - node1_exit1_uuid = "805f5073-ce96-4b6a-ab9f-e77dd412f83b" - node2_uuid = "fcc47dc4-306b-4b2f-ad72-7e53f045c3c4" - - seg1_url = reverse("flows.flow_recent_contacts", args=[flow.uuid, node1_exit1_uuid, node2_uuid]) - - # nothing set in redis just means empty list - self.assertRequestDisallowed(seg1_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(seg1_url, [self.user, self.editor, self.admin]) - self.assertEqual([], response.json()) - - def add_recent_contact(exit_uuid: str, dest_uuid: str, contact, text: str, ts: float): - r = get_redis_connection() - member = f"{uuid4()}|{contact.id}|{text}" # text is prefixed with a random value to keep it unique - r.zadd(f"recent_contacts:{exit_uuid}:{dest_uuid}", mapping={member: ts}) - - add_recent_contact(node1_exit1_uuid, node2_uuid, contact1, "Hi there", 1639338554.969123) - add_recent_contact(node1_exit1_uuid, node2_uuid, contact2, "|x|", 1639338555.234567) - add_recent_contact(node1_exit1_uuid, node2_uuid, contact1, "Sounds good", 1639338561.345678) - - response = 
self.assertReadFetch(seg1_url, [self.user, self.editor, self.admin]) - self.assertEqual( - [ - { - "contact": {"uuid": str(contact1.uuid), "name": "Bob"}, - "operand": "Sounds good", - "time": "2021-12-12T19:49:21.345678+00:00", - }, - { - "contact": {"uuid": str(contact2.uuid), "name": "0979 222 222"}, - "operand": "|x|", - "time": "2021-12-12T19:49:15.234567+00:00", - }, - { - "contact": {"uuid": str(contact1.uuid), "name": "Bob"}, - "operand": "Hi there", - "time": "2021-12-12T19:49:14.969123+00:00", - }, - ], - response.json(), - ) - - def test_results(self): - flow = self.get_flow("favorites_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - name_prompt = flow_nodes[6] - name_split = flow_nodes[7] - end_prompt = flow_nodes[8] - - pete = self.create_contact("Pete", phone="+12065553027") - pete_session = ( - MockSessionWriter(pete, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(pete, "blue")) - .set_result("Color", "blue", "Blue", "blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! What is your favorite beer?") - .visit(beer_split) - .wait() - .save() - ) - - jimmy = self.create_contact("Jimmy", phone="+12065553026") - ( - MockSessionWriter(jimmy, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(jimmy, "red")) - .set_result("Color", "red", "Red", "red") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Red too! What is your favorite beer?") - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(jimmy, "turbo")) - .set_result("Beer", "turbo", "Turbo King", "turbo") - .visit(name_prompt, exit_index=2) - .send_msg("Mmmmm... delicious Turbo King. 
Lastly, what is your name?") - .visit(name_split) - .wait() - .save() - ) - - john = self.create_contact("John", phone="+12065553028") - ( - MockSessionWriter(john, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .fail("some error") - .save() - ) - - self.login(self.admin) - - response = self.client.get(reverse("flows.flow_results", args=[flow.uuid])) - self.assertEqual(200, response.status_code) - - # fetch counts endpoint, should have 2 color results (one is a test contact) - response = self.client.get(reverse("flows.flow_category_counts", args=[flow.uuid])) - counts = response.json()["counts"] - self.assertEqual("Color", counts[0]["name"]) - self.assertEqual(2, counts[0]["total"]) - - FlowCRUDL.ActivityChart.HISTOGRAM_MIN = 0 - FlowCRUDL.ActivityChart.PERIOD_MIN = 0 - - # and some charts - response = self.client.get(reverse("flows.flow_activity_data", args=[flow.id])) - data = response.json() - - # we have two waiting runs, one failed run - self.assertEqual(data["summary"]["failed"], 1) - self.assertEqual(data["summary"]["active"], 0) - self.assertEqual(data["summary"]["waiting"], 2) - self.assertEqual(data["summary"]["completed"], 0) - self.assertEqual(data["summary"]["expired"], 0) - self.assertEqual(data["summary"]["interrupted"], 0) - self.assertEqual(data["summary"]["title"], "3 Responses") - - # now complete the flow for Pete - ( - pete_session.resume(msg=self.create_incoming_msg(pete, "primus")) - .set_result("Beer", "primus", "Primus", "primus") - .visit(name_prompt) - .send_msg("Mmmmm... delicious Primus. Lastly, what is your name?") - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(pete, "Pete")) - .visit(end_prompt) - .complete() - .save() - ) - - # now only one waiting, one completed, one failed and 5 total responses - response = self.client.get(reverse("flows.flow_activity_data", args=[flow.id])) - data = response.json() - - self.assertEqual(data["summary"]["failed"], 1) - self.assertEqual(data["summary"]["active"], 0) - self.assertEqual(data["summary"]["waiting"], 1) - self.assertEqual(data["summary"]["completed"], 1) - self.assertEqual(data["summary"]["expired"], 0) - self.assertEqual(data["summary"]["interrupted"], 0) - self.assertEqual(data["summary"]["title"], "5 Responses") - - # they all happened on the same day - response = self.client.get(reverse("flows.flow_activity_data", args=[flow.id])) - data = response.json() - points = data["histogram"] - self.assertEqual(1, len(points)) - - # put one of our counts way in the past so we get a different histogram scale - count = FlowPathCount.objects.filter(flow=flow).order_by("id")[1] - count.period = count.period - timedelta(days=25) - count.save() - - response = self.client.get(reverse("flows.flow_activity_data", args=[flow.id])) - data = response.json() - points = data["histogram"] - self.assertTrue(timedelta(days=24).total_seconds() * 1000 < (points[1][0] - points[0][0])) - - # pick another scale - count.period = count.period - timedelta(days=600) - count.save() - response = self.client.get(reverse("flows.flow_activity_data", args=[flow.id])) - - # this should give us a more compressed histogram - data = response.json() - points = data["histogram"] - self.assertTrue(timedelta(days=620).total_seconds() * 1000 < (points[1][0] - points[0][0])) - - self.assertEqual(24, len(data["hod"])) - self.assertEqual(7, len(data["dow"])) - - def test_activity(self): - flow = self.get_flow("favorites_v13") - flow_nodes = 
flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - - pete = self.create_contact("Pete", phone="+12065553027") - ( - MockSessionWriter(pete, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(pete, "blue")) - .set_result("Color", "blue", "Blue", "blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! What is your favorite beer?") - .visit(beer_split) - .wait() - .save() - ) - - self.login(self.admin) - response = self.client.get(reverse("flows.flow_activity", args=[flow.uuid])) - - self.assertEqual(200, response.status_code) - self.assertEqual( - { - "is_starting": False, - "nodes": {beer_split["uuid"]: 1}, - "segments": { - f'{color_prompt["exits"][0]["uuid"]}:{color_split["uuid"]}': 1, - f'{color_split["exits"][2]["uuid"]}:{beer_prompt["uuid"]}': 1, - f'{beer_prompt["exits"][0]["uuid"]}:{beer_split["uuid"]}': 1, - }, - }, - response.json(), - ) - - def test_activity_chart_of_inactive_flow(self): - flow = self.get_flow("favorites") - flow.release(self.admin) - - self.login(self.admin) - response = self.client.get(reverse("flows.flow_activity_chart", args=[flow.id])) - - self.assertEqual(404, response.status_code) - - def test_category_counts_of_inactive_flow(self): - flow = self.get_flow("favorites") - flow.release(self.admin) - - self.login(self.admin) - response = self.client.get(reverse("flows.flow_category_counts", args=[flow.uuid])) - - self.assertEqual(404, response.status_code) - - def test_write_protection(self): - flow = self.get_flow("favorites_v13") - flow_json = flow.get_definition() - flow_json_copy = flow_json.copy() - - self.assertEqual(1, flow_json["revision"]) - - self.login(self.admin) - - # saving should work - flow.save_revision(self.admin, flow_json) - - self.assertEqual(2, flow_json["revision"]) - - # we can't save with older revision number - with self.assertRaises(FlowUserConflictException): - flow.save_revision(self.admin, flow_json_copy) - - # make flow definition invalid by creating a duplicate node UUID - mode0_uuid = flow_json["nodes"][0]["uuid"] - flow_json["nodes"][1]["uuid"] = mode0_uuid - - with self.assertRaises(mailroom.FlowValidationException) as cm: - flow.save_revision(self.admin, flow_json) - - self.assertEqual(f"node UUID {mode0_uuid} isn't unique", str(cm.exception)) - - # check view converts exception to error response - response = self.client.post( - reverse("flows.flow_revisions", args=[flow.uuid]), data=flow_json, content_type="application/json" - ) - - self.assertEqual(400, response.status_code) - self.assertEqual( - { - "status": "failure", - "description": "Your flow failed validation. 
Please refresh your browser.", - "detail": f"node UUID {mode0_uuid} isn't unique", - }, - response.json(), - ) - - def test_change_language(self): - self.org.set_flow_languages(self.admin, ["eng", "spa", "ara"]) - - flow = self.get_flow("favorites_v13") - - change_url = reverse("flows.flow_change_language", args=[flow.id]) - - self.assertUpdateSubmit( - change_url, - self.admin, - {"language": ""}, - form_errors={"language": "This field is required."}, - object_unchanged=flow, - ) - - self.assertUpdateSubmit( - change_url, - self.admin, - {"language": "fra"}, - form_errors={"language": "Not a valid language."}, - object_unchanged=flow, - ) - - self.assertUpdateSubmit(change_url, self.admin, {"language": "spa"}, success_status=302) - - flow_def = flow.get_definition() - self.assertIn("eng", flow_def["localization"]) - self.assertEqual("¿Cuál es tu color favorito?", flow_def["nodes"][0]["actions"][0]["text"]) - - def test_export_results(self): - export_url = reverse("flows.flow_export_results") - - flow1 = self.create_flow("Test 1") - flow2 = self.create_flow("Test 2") - testers = self.create_group("Testers", contacts=[]) - gender = self.create_field("gender", "Gender") - - self.assertRequestDisallowed(export_url, [None, self.agent]) - response = self.assertUpdateFetch( - export_url + f"?ids={flow1.id},{flow2.id}", - [self.user, self.editor, self.admin], - form_fields=( - "start_date", - "end_date", - "with_fields", - "with_groups", - "flows", - "extra_urns", - "responded_only", - ), - ) - self.assertNotContains(response, "already an export in progress") - - # anon orgs don't see urns option - with self.anonymous(self.org): - response = self.client.get(export_url) - self.assertEqual( - ["start_date", "end_date", "with_fields", "with_groups", "flows", "responded_only", "loc"], - list(response.context["form"].fields.keys()), - ) - - # create a dummy export task so that we won't be able to export - blocking_export = ResultsExport.create( - self.org, self.admin, start_date=date.today() - timedelta(days=7), end_date=date.today() - ) - - response = self.client.get(export_url) - self.assertContains(response, "already an export in progress") - - # check we can't submit in case a user opens the form and whilst another user is starting an export - response = self.client.post( - export_url, {"start_date": "2022-06-28", "end_date": "2022-09-28", "flows": [flow1.id]} - ) - self.assertContains(response, "already an export in progress") - self.assertEqual(1, Export.objects.count()) - - # mark that one as finished so it's no longer a blocker - blocking_export.status = Export.STATUS_COMPLETE - blocking_export.save(update_fields=("status",)) - - # try to submit with no values - response = self.client.post(export_url, {}) - self.assertFormError(response.context["form"], "start_date", "This field is required.") - self.assertFormError(response.context["form"], "end_date", "This field is required.") - self.assertFormError(response.context["form"], "flows", "This field is required.") - - response = self.client.post( - export_url, - { - "start_date": "2022-06-28", - "end_date": "2022-09-28", - "flows": [flow1.id], - "with_groups": [testers.id], - "with_fields": [gender.id], - }, - ) - self.assertEqual(200, response.status_code) - - export = Export.objects.exclude(id=blocking_export.id).get() - self.assertEqual("results", export.export_type) - self.assertEqual(date(2022, 6, 28), export.start_date) - self.assertEqual(date(2022, 9, 28), export.end_date) - self.assertEqual( - { - "flow_ids": [flow1.id], - 
"with_groups": [testers.id], - "with_fields": [gender.id], - "extra_urns": [], - "responded_only": False, - }, - export.config, - ) - - def test_export_and_download_translation(self): - self.org.set_flow_languages(self.admin, ["spa"]) - - flow = self.get_flow("favorites") - export_url = reverse("flows.flow_export_translation", args=[flow.id]) - - self.assertRequestDisallowed(export_url, [None, self.agent, self.admin2]) - self.assertUpdateFetch(export_url, [self.user, self.editor, self.admin], form_fields=["language"]) - - # submit with no language - response = self.assertUpdateSubmit(export_url, self.admin, {}, success_status=200) - - download_url = response["Temba-Success"] - self.assertEqual(f"/flow/download_translation/?flow={flow.id}&language=", download_url) - - # check fetching the PO from the download link - with patch("temba.mailroom.client.client.MailroomClient.po_export") as mock_po_export: - mock_po_export.return_value = b'msgid "Red"\nmsgstr "Roja"\n\n' - self.assertRequestDisallowed(download_url, [None, self.agent, self.admin2]) - response = self.assertReadFetch(download_url, [self.user, self.editor, self.admin]) - - self.assertEqual(b'msgid "Red"\nmsgstr "Roja"\n\n', response.content) - self.assertEqual('attachment; filename="favorites.po"', response["Content-Disposition"]) - self.assertEqual("text/x-gettext-translation", response["Content-Type"]) - - # submit with a language - response = self.assertUpdateSubmit(export_url, self.admin, {"language": "spa"}, success_status=200) - - download_url = response["Temba-Success"] - self.assertEqual(f"/flow/download_translation/?flow={flow.id}&language=spa", download_url) - - # check fetching the PO from the download link - with patch("temba.mailroom.client.client.MailroomClient.po_export") as mock_po_export: - mock_po_export.return_value = b'msgid "Red"\nmsgstr "Roja"\n\n' - response = self.requestView(download_url, self.admin) - - # filename includes language now - self.assertEqual('attachment; filename="favorites.spa.po"', response["Content-Disposition"]) - - def test_import_translation(self): - self.org.set_flow_languages(self.admin, ["eng", "spa"]) - - flow = self.get_flow("favorites_v13") - step1_url = reverse("flows.flow_import_translation", args=[flow.id]) - - # check step 1 is just a file upload - self.assertRequestDisallowed(step1_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch(step1_url, [self.editor, self.admin], form_fields=["po_file"]) - - # submit with no file - self.assertUpdateSubmit( - step1_url, self.admin, {}, form_errors={"po_file": "This field is required."}, object_unchanged=flow - ) - - # submit with something that's empty - response = self.requestView(step1_url, self.admin, post_data={"po_file": io.BytesIO(b"")}) - self.assertFormError(response.context["form"], "po_file", "The submitted file is empty.") - - # submit with something that's not a valid PO file - response = self.requestView(step1_url, self.admin, post_data={"po_file": io.BytesIO(b"msgid")}) - self.assertFormError(response.context["form"], "po_file", "File doesn't appear to be a valid PO file.") - - # submit with something that's in the base language of the flow - po_file = io.BytesIO( - b""" -#, fuzzy -msgid "" -msgstr "" -"POT-Creation-Date: 2018-07-06 12:30+0000\\n" -"Language: en\\n" -"Language-3: eng\\n" - -msgid "Blue" -msgstr "Bluuu" - """ - ) - response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file}) - self.assertFormError( - response.context["form"], - "po_file", - "Contains translations in 
English which is the base language of this flow.",
-        )
-
-        # submit with something that's in a language which is not a supported translation language
-        po_file = io.BytesIO(
-            b"""
-#, fuzzy
-msgid ""
-msgstr ""
-"POT-Creation-Date: 2018-07-06 12:30+0000\\n"
-"Language: fr\\n"
-"Language-3: fra\\n"
-
-msgid "Blue"
-msgstr "Bleu"
-            """
-        )
-        response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file})
-        self.assertFormError(
-            response.context["form"],
-            "po_file",
-            "Contains translations in French which is not a supported translation language.",
-        )
-
-        # submit with something that doesn't have an explicit language
-        po_file = io.BytesIO(
-            b"""
-msgid "Blue"
-msgstr "Azul"
-            """
-        )
-        response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file})
-
-        self.assertEqual(302, response.status_code)
-        self.assertIn(f"/flow/import_translation/{flow.id}/?po=", response.url)
-
-        response = self.assertUpdateFetch(response.url, [self.admin], form_fields=["language"])
-        self.assertContains(response, "Unknown")
-
-        # submit a different PO that does have language set
-        po_file = io.BytesIO(
-            b"""
-#, fuzzy
-msgid ""
-msgstr ""
-"POT-Creation-Date: 2018-07-06 12:30+0000\\n"
-"Language: es\\n"
-"MIME-Version: 1.0\\n"
-"Content-Type: text/plain; charset=UTF-8\\n"
-"Language-3: spa\\n"
-
-#: Favorites/8720f157-ca1c-432f-9c0b-2014ddc77094/name:0
-#: Favorites/a4d15ed4-5b24-407f-b86e-4b881f09a186/arguments:0
-msgid "Blue"
-msgstr "Azul"
-"""
-        )
-        response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file})
-
-        self.assertEqual(302, response.status_code)
-        self.assertIn(f"/flow/import_translation/{flow.id}/?po=", response.url)
-
-        step2_url = response.url
-
-        response = self.assertUpdateFetch(step2_url, [self.admin], form_fields=["language"])
-        self.assertContains(response, "Spanish (spa)")
-        self.assertEqual({"language": "spa"}, response.context["form"].initial)
-
-        # confirm the import
-        with patch("temba.mailroom.client.client.MailroomClient.po_import") as mock_po_import:
-            mock_po_import.return_value = {"flows": [flow.get_definition()]}
-
-            response = self.requestView(step2_url, self.admin, post_data={"language": "spa"})
-
-        # should redirect back to editor
-        self.assertEqual(302, response.status_code)
-        self.assertEqual(f"/flow/editor/{flow.uuid}/", response.url)
-
-        # should have a new revision
-        self.assertEqual(2, flow.revisions.count())
-
-
-class FlowRunTest(TembaTest):
-    def setUp(self):
-        super().setUp()
-
-        self.contact = self.create_contact("Ben Haggerty", phone="+250788123123")
-
-    def test_status_counts(self):
-        contact = self.create_contact("Bob", phone="+1234567890")
-        session = FlowSession.objects.create(
-            uuid=uuid4(),
-            org=self.org,
-            contact=self.contact,
-            status=FlowSession.STATUS_WAITING,
-            output_url="http://sessions.com/123.json",
-            created_on=timezone.now(),
-            wait_started_on=timezone.now(),
-            wait_expires_on=timezone.now() + timedelta(days=7),
-            wait_resume_on_expire=False,
-        )
-
-        def create_runs(flow_status_pairs: tuple) -> list:
-            runs = []
-            for flow, status in flow_status_pairs:
-                runs.append(
-                    FlowRun(
-                        uuid=uuid4(),
-                        org=self.org,
-                        session=session,
-                        flow=flow,
-                        contact=contact,
-                        status=status,
-                        created_on=timezone.now(),
-                        modified_on=timezone.now(),
-                        exited_on=timezone.now() if status not in ("A", "W") else None,
-                    )
-                )
-            return FlowRun.objects.bulk_create(runs)
-
-        flow1 = self.create_flow("Test 1")
-        flow2 = self.create_flow("Test 2")
-
-        runs1 = create_runs(
-            (
-                (flow1, FlowRun.STATUS_ACTIVE),
-                (flow2,
FlowRun.STATUS_WAITING), - (flow1, FlowRun.STATUS_ACTIVE), - (flow2, FlowRun.STATUS_WAITING), - (flow1, FlowRun.STATUS_WAITING), - (flow1, FlowRun.STATUS_COMPLETED), - ) - ) - - self.assertEqual( - {(flow1, "A"): 2, (flow2, "W"): 2, (flow1, "W"): 1, (flow1, "C"): 1}, - {(c.flow, c.status): c.count for c in FlowRunStatusCount.objects.all()}, - ) - self.assertEqual({"A": 2, "W": 1, "C": 1}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 2}, FlowRunStatusCount.get_totals(flow2)) - - # no difference after squashing - squash_flow_counts() - - self.assertEqual({"A": 2, "W": 1, "C": 1}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 2}, FlowRunStatusCount.get_totals(flow2)) - - runs2 = create_runs( - ( - (flow1, FlowRun.STATUS_ACTIVE), - (flow1, FlowRun.STATUS_ACTIVE), - (flow2, FlowRun.STATUS_EXPIRED), - ) - ) - - self.assertEqual({"A": 4, "W": 1, "C": 1}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 2, "X": 1}, FlowRunStatusCount.get_totals(flow2)) - - # bulk update runs like they're being interrupted - FlowRun.objects.filter(id__in=[r.id for r in runs1]).update( - status=FlowRun.STATUS_INTERRUPTED, exited_on=timezone.now() - ) - - self.assertEqual({"A": 2, "W": 0, "C": 0, "I": 4}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 0, "X": 1, "I": 2}, FlowRunStatusCount.get_totals(flow2)) - - # no difference after squashing - squash_flow_counts() - - self.assertEqual({"A": 2, "W": 0, "C": 0, "I": 4}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 0, "X": 1, "I": 2}, FlowRunStatusCount.get_totals(flow2)) - - # do manual deletion of some runs - FlowRun.objects.filter(id__in=[r.id for r in runs2]).update(delete_from_results=True) - FlowRun.objects.filter(id__in=[r.id for r in runs2]).delete() - - self.assertEqual({"A": 0, "W": 0, "C": 0, "I": 4}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 0, "X": 0, "I": 2}, FlowRunStatusCount.get_totals(flow2)) - - # do archival deletion of the rest - FlowRun.objects.filter(id__in=[r.id for r in runs1]).delete() - - # status counts are unchanged - self.assertEqual({"A": 0, "W": 0, "C": 0, "I": 4}, FlowRunStatusCount.get_totals(flow1)) - self.assertEqual({"W": 0, "X": 0, "I": 2}, FlowRunStatusCount.get_totals(flow2)) - - def test_as_archive_json(self): - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[4] - color_other = flow_nodes[3] - - msg_in = self.create_incoming_msg(self.contact, "green") - - run = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=msg_in) - .set_result("Color", "green", "Other", "green") - .visit(color_other) - .send_msg("That is a funny color. 
Try again.", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - run_json = run.as_archive_json() - - self.assertEqual( - set(run_json.keys()), - set( - [ - "id", - "uuid", - "flow", - "contact", - "responded", - "path", - "values", - "created_on", - "modified_on", - "exited_on", - "exit_type", - ] - ), - ) - - self.assertEqual(run.id, run_json["id"]) - self.assertEqual({"uuid": str(flow.uuid), "name": "Colors"}, run_json["flow"]) - self.assertEqual({"uuid": str(self.contact.uuid), "name": "Ben Haggerty"}, run_json["contact"]) - self.assertTrue(run_json["responded"]) - - self.assertEqual( - [ - {"node": matchers.UUID4String(), "time": matchers.ISODate()}, - {"node": matchers.UUID4String(), "time": matchers.ISODate()}, - {"node": matchers.UUID4String(), "time": matchers.ISODate()}, - {"node": matchers.UUID4String(), "time": matchers.ISODate()}, - ], - run_json["path"], - ) - - self.assertEqual( - { - "color": { - "category": "Other", - "input": "green", - "name": "Color", - "node": matchers.UUID4String(), - "time": matchers.ISODate(), - "value": "green", - } - }, - run_json["values"], - ) - - self.assertEqual(run.created_on.isoformat(), run_json["created_on"]) - self.assertEqual(run.modified_on.isoformat(), run_json["modified_on"]) - self.assertIsNone(run_json["exit_type"]) - self.assertIsNone(run_json["exited_on"]) - - def _check_deletion(self, by_archiver: bool, expected: dict, session_completed=True): - """ - Runs our favorites flow, then deletes the run and asserts our final state - """ - - flow = self.get_flow("favorites_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[2] - beer_prompt = flow_nodes[3] - beer_split = flow_nodes[5] - name_prompt = flow_nodes[6] - name_split = flow_nodes[7] - end_prompt = flow_nodes[8] - - start = FlowStart.create(flow, self.admin, contacts=[self.contact]) - if session_completed: - ( - MockSessionWriter(self.contact, flow, start) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "blue")) - .set_result("Color", "blue", "Blue", "blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! What is your favorite beer?") - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "primus")) - .set_result("Beer", "primus", "Primus", "primus") - .visit(name_prompt, exit_index=2) - .send_msg("Mmmmm... delicious Turbo King. Lastly, what is your name?") - .visit(name_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "Ryan Lewis")) - .visit(end_prompt) - .complete() - .save() - ) - else: - ( - MockSessionWriter(self.contact, flow, start) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "blue")) - .set_result("Color", "blue", "Blue", "blue") - .visit(beer_prompt, exit_index=2) - .send_msg("Good choice, I like Blue too! What is your favorite beer?") - .visit(beer_split) - .wait() - .resume(msg=self.create_incoming_msg(self.contact, "primus")) - .set_result("Beer", "primus", "Primus", "primus") - .visit(name_prompt, exit_index=2) - .send_msg("Mmmmm... delicious Turbo King. 
Lastly, what is your name?") - .visit(name_split) - .wait() - .save() - ) - - run = FlowRun.objects.get(contact=self.contact) - if by_archiver: - super(FlowRun, run).delete() # delete_from_counts left unset - else: - run.delete() # delete_from_counts updated to true - - cat_counts = {c["key"]: c for c in flow.get_category_counts()} - - self.assertEqual(2, len(cat_counts)) - self.assertEqual(expected["red_count"], cat_counts["color"]["categories"][0]["count"]) - self.assertEqual(expected["primus_count"], cat_counts["color"]["categories"][0]["count"]) - - self.assertEqual(expected["start_count"], FlowStartCount.get_count(start)) - self.assertEqual(expected["run_count"], flow.get_run_stats()) - - self.assertFalse(FlowRun.objects.filter(id=run.id).exists()) - - @patch("temba.mailroom.queue_interrupt") - def test_delete_by_user_with_complete_session(self, mock_queue_interrupt): - self._check_deletion( - by_archiver=False, - expected={ - "red_count": 0, - "primus_count": 0, - "start_count": 1, # unchanged - "run_count": { - "total": 0, - "status": { - "active": 0, - "waiting": 0, - "completed": 0, - "expired": 0, - "interrupted": 0, - "failed": 0, - }, - "completion": 0, - }, - }, - ) - self.assertFalse(mock_queue_interrupt.called) - - @patch("temba.mailroom.queue_interrupt") - def test_delete_by_user_without_complete_session(self, mock_queue_interrupt): - self._check_deletion( - by_archiver=False, - expected={ - "red_count": 0, - "primus_count": 0, - "start_count": 1, # unchanged - "run_count": { - "total": 0, - "status": { - "active": 0, - "waiting": 0, - "completed": 0, - "expired": 0, - "interrupted": 0, - "failed": 0, - }, - "completion": 0, - }, - }, - session_completed=False, - ) - mock_queue_interrupt.assert_called_once() - - @patch("temba.mailroom.queue_interrupt") - def test_delete_by_archiver(self, mock_queue_interrupt): - self._check_deletion( - by_archiver=True, - expected={ - "red_count": 1, - "primus_count": 1, - "start_count": 1, # unchanged - "run_count": { # unchanged - "total": 1, - "status": { - "active": 0, - "waiting": 0, - "completed": 1, - "expired": 0, - "interrupted": 0, - "failed": 0, - }, - "completion": 100, - }, - }, - ) - self.assertFalse(mock_queue_interrupt.called) - - def test_big_ids(self): - # create a session and run with big ids - session = FlowSession.objects.create( - id=3_000_000_000, - uuid=uuid4(), - org=self.org, - contact=self.contact, - status=FlowSession.STATUS_WAITING, - output_url="http://sessions.com/123.json", - created_on=timezone.now(), - wait_started_on=timezone.now(), - wait_expires_on=timezone.now() + timedelta(days=7), - wait_resume_on_expire=False, - ) - FlowRun.objects.create( - id=4_000_000_000, - uuid=uuid4(), - org=self.org, - session=session, - flow=self.create_flow("Test"), - contact=self.contact, - status=FlowRun.STATUS_WAITING, - created_on=timezone.now(), - modified_on=timezone.now(), - path=[ - { - "uuid": "b5c3421c-3bbb-4dc7-9bda-683456588a6d", - "node_uuid": "857a1498-3d5f-40f5-8185-2ce596ce2677", - "arrived_on": "2021-12-20T08:47:30.123Z", - "exit_uuid": "6fc14d2c-3b4d-49c7-b342-4b2b2ebf7678", - }, - { - "uuid": "4a254612-8437-47e1-b7bd-feb97ee60bf6", - "node_uuid": "59d992c6-c491-473d-a7e9-4f431d705c01", - "arrived_on": "2021-12-20T08:47:30.234Z", - "exit_uuid": None, - }, - ], - current_node_uuid="59d992c6-c491-473d-a7e9-4f431d705c01", - ) - self.assertEqual( - {"6fc14d2c-3b4d-49c7-b342-4b2b2ebf7678:59d992c6-c491-473d-a7e9-4f431d705c01": 1}, - {f"{c.from_uuid}:{c.to_uuid}": c.count for c in FlowPathCount.objects.all()}, - ) 
- self.assertEqual( - {"59d992c6-c491-473d-a7e9-4f431d705c01": 1}, - {str(c.node_uuid): c.count for c in FlowNodeCount.objects.all()}, - ) - - -class FlowRunCRUDLTest(TembaTest, CRUDLTestMixin): - def test_delete(self): - contact = self.create_contact("Ann", phone="+1234567890") - flow = self.create_flow("Test") - - run1 = FlowRun.objects.create( - uuid=uuid4(), - org=self.org, - flow=flow, - contact=contact, - status=FlowRun.STATUS_COMPLETED, - created_on=timezone.now(), - modified_on=timezone.now(), - exited_on=timezone.now(), - ) - run2 = FlowRun.objects.create( - uuid=uuid4(), - org=self.org, - flow=flow, - contact=contact, - status=FlowRun.STATUS_COMPLETED, - created_on=timezone.now(), - modified_on=timezone.now(), - exited_on=timezone.now(), - ) - - delete_url = reverse("flows.flowrun_delete", args=[run1.id]) - - self.assertDeleteSubmit(delete_url, self.admin, object_deleted=run1, success_status=200) - - self.assertFalse(FlowRun.objects.filter(id=run1.id).exists()) - self.assertTrue(FlowRun.objects.filter(id=run2.id).exists()) # unchanged - - -class FlowSessionTest(TembaTest): - @mock_mailroom - def test_interrupt(self, mr_mocks): - contact = self.create_contact("Ben Haggerty", phone="+250788123123") - - def create_session(org, created_on: datetime): - return FlowSession.objects.create( - uuid=uuid4(), - org=org, - contact=contact, - created_on=created_on, - output_url="http://sessions.com/123.json", - status=FlowSession.STATUS_WAITING, - wait_started_on=timezone.now(), - wait_expires_on=timezone.now() + timedelta(days=7), - wait_resume_on_expire=False, - ) - - create_session(self.org, timezone.now() - timedelta(days=88)) - session2 = create_session(self.org, timezone.now() - timedelta(days=90)) - session3 = create_session(self.org, timezone.now() - timedelta(days=91)) - session4 = create_session(self.org2, timezone.now() - timedelta(days=92)) - - interrupt_flow_sessions() - - self.assertEqual( - [ - { - "type": "interrupt_sessions", - "org_id": self.org.id, - "queued_on": matchers.Datetime(), - "task": {"session_ids": [session2.id, session3.id]}, - }, - { - "type": "interrupt_sessions", - "org_id": self.org2.id, - "queued_on": matchers.Datetime(), - "task": {"session_ids": [session4.id]}, - }, - ], - mr_mocks.queued_batch_tasks, - ) - - def test_trim(self): - contact = self.create_contact("Ben Haggerty", phone="+250788123123") - flow = self.get_flow("color") - - # create some runs that have sessions - session1 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=contact, - output_url="http://sessions.com/123.json", - status=FlowSession.STATUS_WAITING, - wait_started_on=timezone.now(), - wait_expires_on=timezone.now() + timedelta(days=7), - wait_resume_on_expire=False, - ) - session2 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=contact, - output_url="http://sessions.com/234.json", - status=FlowSession.STATUS_WAITING, - wait_started_on=timezone.now(), - wait_expires_on=timezone.now() + timedelta(days=7), - wait_resume_on_expire=False, - ) - session3 = FlowSession.objects.create( - uuid=uuid4(), - org=self.org, - contact=contact, - output_url="http://sessions.com/345.json", - status=FlowSession.STATUS_WAITING, - wait_started_on=timezone.now(), - wait_expires_on=timezone.now() + timedelta(days=7), - wait_resume_on_expire=False, - ) - run1 = FlowRun.objects.create( - org=self.org, flow=flow, contact=contact, session=session1, status=FlowRun.STATUS_WAITING - ) - run2 = FlowRun.objects.create( - org=self.org, flow=flow, contact=contact, 
session=session2, status=FlowRun.STATUS_WAITING
-        )
-        run3 = FlowRun.objects.create(
-            org=self.org, flow=flow, contact=contact, session=session3, status=FlowRun.STATUS_WAITING
-        )
-
-        # create an IVR call with session
-        call = self.create_incoming_call(flow, contact)
-        run4 = call.session.runs.get()
-
-        self.assertIsNotNone(run1.session)
-        self.assertIsNotNone(run2.session)
-        self.assertIsNotNone(run3.session)
-        self.assertIsNotNone(run4.session)
-
-        # end run1 and run4's sessions in the past
-        run1.status = FlowRun.STATUS_COMPLETED
-        run1.exited_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)
-        run1.save(update_fields=("status", "exited_on"))
-        run1.session.status = FlowSession.STATUS_COMPLETED
-        run1.session.ended_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)
-        run1.session.save(update_fields=("status", "ended_on"))
-
-        run4.status = FlowRun.STATUS_INTERRUPTED
-        run4.exited_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)
-        run4.save(update_fields=("status", "exited_on"))
-        run4.session.status = FlowSession.STATUS_INTERRUPTED
-        run4.session.ended_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc)
-        run4.session.save(update_fields=("status", "ended_on"))
-
-        # end run2's session now
-        run2.status = FlowRun.STATUS_EXPIRED
-        run2.exited_on = timezone.now()
-        run2.save(update_fields=("status", "exited_on"))
-        run2.session.status = FlowSession.STATUS_EXPIRED
-        run2.session.ended_on = timezone.now()
-        run2.session.save(update_fields=("status", "ended_on"))
-
-        trim_flow_sessions()
-
-        run1, run2, run3, run4 = FlowRun.objects.order_by("id")
-
-        self.assertIsNone(run1.session)
-        self.assertIsNotNone(run2.session)  # ended too recently to be deleted
-        self.assertIsNotNone(run3.session)  # never ended
-        self.assertIsNone(run4.session)
-
-        # only sessions for run2 and run3 are left
-        self.assertEqual(FlowSession.objects.count(), 2)
-
-
-class ResultsExportTest(TembaTest):
-    def setUp(self):
-        super().setUp()
-
-        self.contact = self.create_contact("Eric", phone="+250788382382")
-        self.contact2 = self.create_contact("Nic", phone="+250788383383")
-        self.contact3 = self.create_contact("Norbert", phone="+250788123456")
-
-    def _export(
-        self,
-        flow,
-        start_date,
-        end_date,
-        responded_only=False,
-        with_fields=(),
-        with_groups=(),
-        extra_urns=(),
-        has_results=True,
-    ):
-        """
-        Exports results for the given flow and returns the generated workbook
-        """
-
-        readonly_models = {FlowRun}
-        if has_results:
-            readonly_models.add(Contact)
-            readonly_models.add(ContactURN)
-
-        export = ResultsExport.create(
-            self.org,
-            self.admin,
-            start_date,
-            end_date,
-            flows=[flow],
-            with_fields=with_fields,
-            with_groups=with_groups,
-            responded_only=responded_only,
-            extra_urns=extra_urns,
-        )
-
-        with self.mockReadOnly(assert_models=readonly_models):
-            export.perform()
-
-        return load_workbook(filename=default_storage.open(f"orgs/{self.org.id}/results_exports/{export.uuid}.xlsx"))
-
-    @mock_mailroom
-    def test_export(self, mr_mocks):
-        today = timezone.now().astimezone(self.org.timezone).date()
-
-        flow = self.get_flow("color_v13")
-        flow_nodes = flow.get_definition()["nodes"]
-        color_prompt = flow_nodes[0]
-        color_split = flow_nodes[4]
-        color_other = flow_nodes[3]
-        orange_reply = flow_nodes[1]
-
-        # add a spec for a hidden result to this flow
-        flow.metadata[Flow.METADATA_RESULTS].append(
-            {
-                "key": "_color_classification",
-                "name": "_Color Classification",
-                "categories": ["Success", "Skipped", "Failure"],
-                "node_uuids": [color_split["uuid"]],
-            }
-        )
-
-        age = self.create_field("age", "Age")
-
devs = self.create_group("Devs", [self.contact]) - - mods = self.contact.update_fields({age: "36"}) - mods += self.contact.update_urns(["tel:+250788382382", "twitter:erictweets"]) - self.contact.modify(self.admin, mods) - - # contact name with an illegal character - self.contact3.name = "Nor\02bert" - self.contact3.save(update_fields=("name",)) - - contact3_run1 = ( - MockSessionWriter(self.contact3, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - contact1_in1 = self.create_incoming_msg(self.contact, "light beige") - contact1_in2 = self.create_incoming_msg(self.contact, "orange") - contact1_run1 = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact1_in1) - .set_result("Color", "light beige", "Other", "light beige") - .visit(color_other) - .send_msg("That is a funny color. Try again.", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact1_in2) - .set_result("Color", "orange", "Orange", "orange") - .set_result("_Color Classification", "orange", "Success", "color_selection") # hidden result - .visit(orange_reply) - .send_msg( - "I love orange too! You said: orange which is category: Orange You are: 0788 382 382 SMS: orange Flow: color: orange", - self.channel, - ) - .complete() - .save() - ).session.runs.get() - - contact2_in1 = self.create_incoming_msg(self.contact2, "green") - contact2_run1 = ( - MockSessionWriter(self.contact2, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact2_in1) - .set_result("Color", "green", "Other", "green") - .visit(color_other) - .send_msg("That is a funny color. Try again.", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - contact2_run2 = ( - MockSessionWriter(self.contact2, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - contact1_in3 = self.create_incoming_msg(self.contact, " blue ") - contact1_run2 = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact1_in3) - .set_result("Color", "blue", "Blue", " blue ") - .visit(orange_reply) - .send_msg("Blue is sad. :(", self.channel) - .complete() - .save() - ).session.runs.get() - - for run in (contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2): - run.refresh_from_db() - - with self.assertNumQueries(23): - workbook = self._export( - flow, - start_date=today - timedelta(days=7), - end_date=today, - with_groups=[devs], - ) - - # check that notifications were created - export = Export.objects.filter(export_type=ResultsExport.slug).order_by("id").last() - self.assertEqual(1, self.admin.notifications.filter(notification_type="export:finished", export=export).count()) - - tz = self.org.timezone - - (sheet_runs,) = workbook.worksheets - - # check runs sheet... 
- self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs - self.assertEqual(12, len(list(sheet_runs.columns))) - - self.assertExcelRow( - sheet_runs, - 0, - [ - "Contact UUID", - "Contact Name", - "URN Scheme", - "URN Value", - "Group:Devs", - "Started", - "Modified", - "Exited", - "Run UUID", - "Color (Category) - Colors", - "Color (Value) - Colors", - "Color (Text) - Colors", - ], - ) - - self.assertExcelRow( - sheet_runs, - 1, - [ - contact3_run1.contact.uuid, - "Norbert", - "tel", - "+250788123456", - False, - contact3_run1.created_on, - contact3_run1.modified_on, - "", - contact3_run1.uuid, - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 2, - [ - contact1_run1.contact.uuid, - "Eric", - "tel", - "+250788382382", - True, - contact1_run1.created_on, - contact1_run1.modified_on, - contact1_run1.exited_on, - contact1_run1.uuid, - "Orange", - "orange", - "orange", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 3, - [ - contact2_run1.contact.uuid, - "Nic", - "tel", - "+250788383383", - False, - contact2_run1.created_on, - contact2_run1.modified_on, - contact2_run1.exited_on, - contact2_run1.uuid, - "Other", - "green", - "green", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 4, - [ - contact2_run2.contact.uuid, - "Nic", - "tel", - "+250788383383", - False, - contact2_run2.created_on, - contact2_run2.modified_on, - "", - contact2_run2.uuid, - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 5, - [ - contact1_run2.contact.uuid, - "Eric", - "tel", - "+250788382382", - True, - contact1_run2.created_on, - contact1_run2.modified_on, - contact1_run2.exited_on, - contact1_run2.uuid, - "Blue", - "blue", - " blue ", - ], - tz, - ) - - # test without unresponded - with self.assertNumQueries(21): - workbook = self._export( - flow, - start_date=today - timedelta(days=7), - end_date=today, - responded_only=True, - with_groups=(devs,), - ) - - tz = self.org.timezone - sheet_runs = workbook.worksheets[0] - - self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs - self.assertEqual(12, len(list(sheet_runs.columns))) - - self.assertExcelRow( - sheet_runs, - 0, - [ - "Contact UUID", - "Contact Name", - "URN Scheme", - "URN Value", - "Group:Devs", - "Started", - "Modified", - "Exited", - "Run UUID", - "Color (Category) - Colors", - "Color (Value) - Colors", - "Color (Text) - Colors", - ], - ) - - self.assertExcelRow( - sheet_runs, - 1, - [ - contact1_run1.contact.uuid, - "Eric", - "tel", - "+250788382382", - True, - contact1_run1.created_on, - contact1_run1.modified_on, - contact1_run1.exited_on, - contact1_run1.uuid, - "Orange", - "orange", - "orange", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 2, - [ - contact2_run1.contact.uuid, - "Nic", - "tel", - "+250788383383", - False, - contact2_run1.created_on, - contact2_run1.modified_on, - contact2_run1.exited_on, - contact2_run1.uuid, - "Other", - "green", - "green", - ], - tz, - ) - - # test export with a contact field - with self.assertNumQueries(25): - workbook = self._export( - flow, - start_date=today - timedelta(days=7), - end_date=today, - with_fields=[age], - with_groups=[devs], - responded_only=True, - extra_urns=["twitter", "line"], - ) - - tz = self.org.timezone - (sheet_runs,) = workbook.worksheets - - # check runs sheet... 
- self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs - self.assertEqual(15, len(list(sheet_runs.columns))) - - self.assertExcelRow( - sheet_runs, - 0, - [ - "Contact UUID", - "Contact Name", - "URN Scheme", - "URN Value", - "Field:Age", - "Group:Devs", - "URN:Twitter", - "URN:Line", - "Started", - "Modified", - "Exited", - "Run UUID", - "Color (Category) - Colors", - "Color (Value) - Colors", - "Color (Text) - Colors", - ], - ) - - self.assertExcelRow( - sheet_runs, - 1, - [ - contact1_run1.contact.uuid, - "Eric", - "tel", - "+250788382382", - "36", - True, - "erictweets", - "", - contact1_run1.created_on, - contact1_run1.modified_on, - contact1_run1.exited_on, - contact1_run1.uuid, - "Orange", - "orange", - "orange", - ], - tz, - ) - - # test that we don't exceed the limit on rows per sheet - with patch("temba.utils.export.MultiSheetExporter.MAX_EXCEL_ROWS", 4): - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) - expected_sheets = [("Runs 1", 4), ("Runs 2", 3)] - - for s, sheet in enumerate(workbook.worksheets): - self.assertEqual((sheet.title, len(list(sheet.rows))), expected_sheets[s]) - - # test we can export archived flows - flow.is_archived = True - flow.save() - - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) - - (sheet_runs,) = workbook.worksheets - - # check runs sheet... - self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs - self.assertEqual(11, len(list(sheet_runs.columns))) - - def test_anon_org(self): - today = timezone.now().astimezone(self.org.timezone).date() - - with self.anonymous(self.org): - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[4] - - msg_in = self.create_incoming_msg(self.contact, "orange") - - run1 = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=msg_in) - .set_result("Color", "orange", "Orange", "orange") - .send_msg("I love orange too!", self.channel) - .complete() - .save() - ).session.runs.get() - - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) - self.assertEqual(1, len(workbook.worksheets)) - sheet_runs = workbook.worksheets[0] - self.assertExcelRow( - sheet_runs, - 0, - [ - "Contact UUID", - "Contact Name", - "URN Scheme", - "Anon Value", - "Started", - "Modified", - "Exited", - "Run UUID", - "Color (Category) - Colors", - "Color (Value) - Colors", - "Color (Text) - Colors", - ], - ) - - self.assertExcelRow( - sheet_runs, - 1, - [ - self.contact.uuid, - "Eric", - "tel", - self.contact.anon_display, - run1.created_on, - run1.modified_on, - run1.exited_on, - run1.uuid, - "Orange", - "orange", - "orange", - ], - self.org.timezone, - ) - - def test_broadcast_only_flow(self): - flow = self.get_flow("send_only_v13") - send_node = flow.get_definition()["nodes"][0] - today = timezone.now().astimezone(self.org.timezone).date() - - for contact in [self.contact, self.contact2, self.contact3]: - ( - MockSessionWriter(contact, flow) - .visit(send_node) - .send_msg("This is the first message.", self.channel) - .send_msg("This is the second message.", self.channel) - .complete() - .save() - ).session.runs.get() - - for contact in [self.contact, self.contact2]: - ( - MockSessionWriter(contact, flow) - .visit(send_node) - .send_msg("This is the first message.", self.channel) - .send_msg("This is the second message.", 
self.channel)
-            .complete()
-            .save()
-        ).session.runs.get()
-
-        contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2 = FlowRun.objects.order_by("id")
-
-        with self.assertNumQueries(17):
-            workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today)
-
-        tz = self.org.timezone
-
-        (sheet_runs,) = workbook.worksheets
-
-        # check runs sheet...
-        self.assertEqual(6, len(list(sheet_runs.rows)))  # header + 5 runs
-        self.assertEqual(8, len(list(sheet_runs.columns)))
-
-        self.assertExcelRow(
-            sheet_runs,
-            0,
-            ["Contact UUID", "Contact Name", "URN Scheme", "URN Value", "Started", "Modified", "Exited", "Run UUID"],
-        )
-
-        self.assertExcelRow(
-            sheet_runs,
-            1,
-            [
-                contact1_run1.contact.uuid,
-                "Eric",
-                "tel",
-                "+250788382382",
-                contact1_run1.created_on,
-                contact1_run1.modified_on,
-                contact1_run1.exited_on,
-                contact1_run1.uuid,
-            ],
-            tz,
-        )
-        self.assertExcelRow(
-            sheet_runs,
-            2,
-            [
-                contact2_run1.contact.uuid,
-                "Nic",
-                "tel",
-                "+250788383383",
-                contact2_run1.created_on,
-                contact2_run1.modified_on,
-                contact2_run1.exited_on,
-                contact2_run1.uuid,
-            ],
-            tz,
-        )
-        self.assertExcelRow(
-            sheet_runs,
-            3,
-            [
-                contact3_run1.contact.uuid,
-                "Norbert",
-                "tel",
-                "+250788123456",
-                contact3_run1.created_on,
-                contact3_run1.modified_on,
-                contact3_run1.exited_on,
-                contact3_run1.uuid,
-            ],
-            tz,
-        )
-        self.assertExcelRow(
-            sheet_runs,
-            4,
-            [
-                contact1_run2.contact.uuid,
-                "Eric",
-                "tel",
-                "+250788382382",
-                contact1_run2.created_on,
-                contact1_run2.modified_on,
-                contact1_run2.exited_on,
-                contact1_run2.uuid,
-            ],
-            tz,
-        )
-        self.assertExcelRow(
-            sheet_runs,
-            5,
-            [
-                contact2_run2.contact.uuid,
-                "Nic",
-                "tel",
-                "+250788383383",
-                contact2_run2.created_on,
-                contact2_run2.modified_on,
-                contact2_run2.exited_on,
-                contact2_run2.uuid,
-            ],
-            tz,
-        )
-
-        # test without unresponded
-        with self.assertNumQueries(10):
-            workbook = self._export(
-                flow,
-                start_date=today - timedelta(days=7),
-                end_date=today,
-                responded_only=True,
-                has_results=False,
-            )
-
-        (sheet_runs,) = workbook.worksheets
-
-        self.assertEqual(1, len(list(sheet_runs.rows)))  # header; no responses to a broadcast only flow
-        self.assertEqual(8, len(list(sheet_runs.columns)))
-
-        self.assertExcelRow(
-            sheet_runs,
-            0,
-            ["Contact UUID", "Contact Name", "URN Scheme", "URN Value", "Started", "Modified", "Exited", "Run UUID"],
-        )
-
-    def test_replaced_rulesets(self):
-        today = timezone.now().astimezone(self.org.timezone).date()
-
-        favorites = self.get_flow("favorites_v13")
-        flow_json = favorites.get_definition()
-        flow_nodes = flow_json["nodes"]
-        color_prompt = flow_nodes[0]
-        color_other = flow_nodes[1]
-        color_split = flow_nodes[2]
-        beer_prompt = flow_nodes[3]
-        beer_split = flow_nodes[5]
-
-        contact3_run1 = (
-            MockSessionWriter(self.contact3, favorites)
-            .visit(color_prompt)
-            .send_msg("What is your favorite color?", self.channel)
-            .visit(color_split)
-            .wait()
-            .save()
-        ).session.runs.get()
-
-        contact1_in1 = self.create_incoming_msg(self.contact, "light beige")
-        contact1_in2 = self.create_incoming_msg(self.contact, "red")
-        contact1_run1 = (
-            MockSessionWriter(self.contact, favorites)
-            .visit(color_prompt)
-            .send_msg("What is your favorite color?", self.channel)
-            .visit(color_split)
-            .wait()
-            .resume(msg=contact1_in1)
-            .set_result("Color", "light beige", "Other", "light beige")
-            .visit(color_other)
-            .send_msg("I don't know that color. 
Try again.", self.channel) - .visit(color_split) - .wait() - .save() - .resume(msg=contact1_in2) - .set_result("Color", "red", "Red", "red") - .visit(beer_prompt) - .send_msg("Good choice, I like Red too! What is your favorite beer?", self.channel) - .visit(beer_split) - .complete() - .save() - ).session.runs.get() - - devs = self.create_group("Devs", [self.contact]) - - # now remap the uuid for our color - flow_json = json.loads(json.dumps(flow_json).replace(color_split["uuid"], str(uuid4()))) - favorites.save_revision(self.admin, flow_json) - flow_nodes = flow_json["nodes"] - color_prompt = flow_nodes[0] - color_other = flow_nodes[1] - color_split = flow_nodes[2] - - contact2_in1 = self.create_incoming_msg(self.contact2, "green") - contact2_run1 = ( - MockSessionWriter(self.contact2, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact2_in1) - .set_result("Color", "green", "Green", "green") - .visit(beer_prompt) - .send_msg("Good choice, I like Green too! What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .save() - ).session.runs.get() - - contact2_run2 = ( - MockSessionWriter(self.contact2, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - contact1_in3 = self.create_incoming_msg(self.contact, " blue ") - contact1_run2 = ( - MockSessionWriter(self.contact, favorites) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact1_in3) - .set_result("Color", "blue", "Blue", " blue ") - .visit(beer_prompt) - .send_msg("Good choice, I like Blue too! What is your favorite beer?", self.channel) - .visit(beer_split) - .wait() - .save() - ).session.runs.get() - - for run in (contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2): - run.refresh_from_db() - - workbook = self._export(favorites, start_date=today - timedelta(days=7), end_date=today, with_groups=[devs]) - - tz = self.org.timezone - - (sheet_runs,) = workbook.worksheets - - # check runs sheet... 
- self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs - self.assertEqual(18, len(list(sheet_runs.columns))) - - self.assertExcelRow( - sheet_runs, - 0, - [ - "Contact UUID", - "Contact Name", - "URN Scheme", - "URN Value", - "Group:Devs", - "Started", - "Modified", - "Exited", - "Run UUID", - "Color (Category) - Favorites", - "Color (Value) - Favorites", - "Color (Text) - Favorites", - "Beer (Category) - Favorites", - "Beer (Value) - Favorites", - "Beer (Text) - Favorites", - "Name (Category) - Favorites", - "Name (Value) - Favorites", - "Name (Text) - Favorites", - ], - ) - - self.assertExcelRow( - sheet_runs, - 1, - [ - contact3_run1.contact.uuid, - "Norbert", - "tel", - "+250788123456", - False, - contact3_run1.created_on, - contact3_run1.modified_on, - "", - contact3_run1.uuid, - "", - "", - "", - "", - "", - "", - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 2, - [ - contact1_run1.contact.uuid, - "Eric", - "tel", - "+250788382382", - True, - contact1_run1.created_on, - contact1_run1.modified_on, - contact1_run1.exited_on, - contact1_run1.uuid, - "Red", - "red", - "red", - "", - "", - "", - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 3, - [ - contact2_run1.contact.uuid, - "Nic", - "tel", - "+250788383383", - False, - contact2_run1.created_on, - contact2_run1.modified_on, - contact2_run1.exited_on, - contact2_run1.uuid, - "Green", - "green", - "green", - "", - "", - "", - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 4, - [ - contact2_run2.contact.uuid, - "Nic", - "tel", - "+250788383383", - False, - contact2_run2.created_on, - contact2_run2.modified_on, - "", - contact2_run2.uuid, - "", - "", - "", - "", - "", - "", - "", - "", - "", - ], - tz, - ) - - self.assertExcelRow( - sheet_runs, - 5, - [ - contact1_run2.contact.uuid, - "Eric", - "tel", - "+250788382382", - True, - contact1_run2.created_on, - contact1_run2.modified_on, - "", - contact1_run2.uuid, - "Blue", - "blue", - " blue ", - "", - "", - "", - "", - "", - "", - ], - tz, - ) - - def test_remove_control_characters(self): - today = timezone.now().astimezone(self.org.timezone).date() - - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[4] - color_other = flow_nodes[3] - - msg_in = self.create_incoming_msg(self.contact, "ngert\x07in.") - - run1 = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=msg_in) - .set_result("Color", "ngert\x07in.", "Other", "ngert\x07in.") - .visit(color_other) - .send_msg("That is a funny color. 
Try again.", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) - tz = self.org.timezone - (sheet_runs,) = workbook.worksheets - - self.assertExcelRow( - sheet_runs, - 1, - [ - run1.contact.uuid, - "Eric", - "tel", - "+250788382382", - run1.created_on, - run1.modified_on, - "", - run1.uuid, - "Other", - "ngertin.", - "ngertin.", - ], - tz, - ) - - def test_from_archives(self): - today = timezone.now().astimezone(self.org.timezone).date() - - flow = self.get_flow("color_v13") - flow_nodes = flow.get_definition()["nodes"] - color_prompt = flow_nodes[0] - color_split = flow_nodes[4] - color_other = flow_nodes[3] - blue_reply = flow_nodes[2] - - contact1_in1 = self.create_incoming_msg(self.contact, "green") - contact1_run = ( - MockSessionWriter(self.contact, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact1_in1) - .set_result("Color", "green", "Other", "green") - .visit(color_other) - .send_msg("That is a funny color. Try again.", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - contact2_in1 = self.create_incoming_msg(self.contact2, "blue") - contact2_run = ( - MockSessionWriter(self.contact2, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .resume(msg=contact2_in1) - .set_result("Color", "blue", "Blue", "blue") - .visit(blue_reply) - .send_msg("Blue is sad :(.", self.channel) - .complete() - .save() - ).session.runs.get() - - # and a run for a different flow - flow2 = self.get_flow("favorites_v13") - flow2_nodes = flow2.get_definition()["nodes"] - - contact2_other_flow = ( - MockSessionWriter(self.contact2, flow2) - .visit(flow2_nodes[0]) - .send_msg("Color???", self.channel) - .visit(flow2_nodes[2]) - .wait() - .save() - ).session.runs.get() - - contact3_run = ( - MockSessionWriter(self.contact3, flow) - .visit(color_prompt) - .send_msg("What is your favorite color?", self.channel) - .visit(color_split) - .wait() - .save() - ).session.runs.get() - - # we now have 4 runs in this order of modified_on - contact1_run.refresh_from_db() - contact2_run.refresh_from_db() - contact2_other_flow.refresh_from_db() - contact3_run.refresh_from_db() - - # archive the first 3 runs, using 'old' archive format that used a list of values for one of them - old_archive_format = contact2_run.as_archive_json() - old_archive_format["values"] = [old_archive_format["values"]] - - self.create_archive( - Archive.TYPE_FLOWRUN, - "D", - timezone.now().date(), - [contact1_run.as_archive_json(), old_archive_format, contact2_other_flow.as_archive_json()], - ) - - contact1_run.delete() - contact2_run.delete() - - # create an archive earlier than our flow created date so we check that it isn't included - self.create_archive( - Archive.TYPE_FLOWRUN, - "D", - timezone.now().date() - timedelta(days=2), - [contact2_run.as_archive_json()], - ) - - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) - - tz = self.org.timezone - (sheet_runs,) = workbook.worksheets - - # check runs sheet... 
- self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs - - self.assertExcelRow( - sheet_runs, - 1, - [ - contact1_run.contact.uuid, - "Eric", - "tel", - "+250788382382", - contact1_run.created_on, - contact1_run.modified_on, - "", - contact1_run.uuid, - "Other", - "green", - "green", - ], - tz, - ) - self.assertExcelRow( - sheet_runs, - 2, - [ - contact2_run.contact.uuid, - "Nic", - "tel", - "+250788383383", - contact2_run.created_on, - contact2_run.modified_on, - contact2_run.exited_on, - contact2_run.uuid, - "Blue", - "blue", - "blue", - ], - tz, - ) - self.assertExcelRow( - sheet_runs, - 3, - [ - contact3_run.contact.uuid, - "Norbert", - "tel", - "+250788123456", - contact3_run.created_on, - contact3_run.modified_on, - "", - contact3_run.uuid, - "", - "", - "", - ], - tz, - ) - - def test_no_responses(self): - today = timezone.now().astimezone(self.org.timezone).date() - flow = self.get_flow("color_v13") - - self.assertEqual(flow.get_run_stats()["total"], 0) - - workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today, has_results=False) - - self.assertEqual(len(workbook.worksheets), 1) - - # every sheet has only the head row - self.assertEqual(1, len(list(workbook.worksheets[0].rows))) - self.assertEqual(11, len(list(workbook.worksheets[0].columns))) - - -class FlowLabelTest(TembaTest): - def test_model(self): - label = FlowLabel.create(self.org, self.admin, "Cool Flows") - self.assertEqual("Cool Flows", label.name) - - # can't create with invalid name - with self.assertRaises(AssertionError): - FlowLabel.create(self.org, self.admin, '"Cool"') - - # can't create with duplicate name - with self.assertRaises(AssertionError): - FlowLabel.create(self.org, self.admin, "Cool Flows") - - flow1 = self.create_flow("Flow 1") - flow2 = self.create_flow("Flow 2") - - label.toggle_label([flow1, flow2], add=True) - self.assertEqual({flow1, flow2}, set(label.get_flows())) - - label.toggle_label([flow1], add=False) - self.assertEqual({flow2}, set(label.get_flows())) - - -class FlowLabelCRUDLTest(TembaTest, CRUDLTestMixin): - def test_create(self): - create_url = reverse("flows.flowlabel_create") - - self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) - self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=("name", "flows")) - - # try to submit without a name - self.assertCreateSubmit(create_url, self.admin, {}, form_errors={"name": "This field is required."}) - - # try to submit with an invalid name - self.assertCreateSubmit( - create_url, self.admin, {"name": '"Cool"\\'}, form_errors={"name": 'Cannot contain the character: "'} - ) - - self.assertCreateSubmit( - create_url, - self.admin, - {"name": "Cool Flows"}, - new_obj_query=FlowLabel.objects.filter(org=self.org, name="Cool Flows"), - ) - - # try to create with a name that's already used - self.assertCreateSubmit(create_url, self.admin, {"name": "Cool Flows"}, form_errors={"name": "Must be unique."}) - - def test_update(self): - label = FlowLabel.create(self.org, self.admin, "Cool Flows") - FlowLabel.create(self.org, self.admin, "Crazy Flows") - - update_url = reverse("flows.flowlabel_update", args=[label.id]) - - self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) - self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=("name", "flows")) - - # try to update to an invalid name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": '"Cool"\\'}, - form_errors={"name": 'Cannot contain the character: "'}, - 
object_unchanged=label, - ) - - # try to update to a non-unique name - self.assertUpdateSubmit( - update_url, - self.admin, - {"name": "Crazy Flows"}, - form_errors={"name": "Must be unique."}, - object_unchanged=label, - ) - - self.assertUpdateSubmit(update_url, self.admin, {"name": "Super Cool Flows"}) - - label.refresh_from_db() - self.assertEqual("Super Cool Flows", label.name) - - def test_delete(self): - label = FlowLabel.create(self.org, self.admin, "Cool Flows") - - delete_url = reverse("flows.flowlabel_delete", args=[label.id]) - - self.assertRequestDisallowed(delete_url, [None, self.user, self.agent, self.admin2]) - - self.assertDeleteFetch(delete_url, [self.editor, self.admin]) - self.assertDeleteSubmit(delete_url, self.admin, object_deleted=label, success_status=200) - - -class SimulationTest(TembaTest): - def add_message(self, payload, text): - """ - Add a message to the payload for the flow server using the default contact - """ - payload["resume"] = { - "type": "msg", - "resumed_on": timezone.now().isoformat(), - "msg": {"text": text, "uuid": str(uuid4()), "urn": "tel:+12065551212"}, - } - - def get_replies(self, response): - """ - Gets any replies in a response from the flow server as a list of strings - """ - replies = [] - for event in response.get("events", []): - if event["type"] == "broadcast_created": - replies.append(event["translations"][event["base_language"]]["text"]) - elif event["type"] == "msg_created": - replies.append(event["msg"]["text"]) - return replies - - def test_simulation_ivr(self): - self.login(self.admin) - flow = self.get_flow("ivr") - - # create our payload - payload = {"version": 2, "trigger": {}, "flow": {}} - url = reverse("flows.flow_simulate", args=[flow.id]) - - with override_settings(MAILROOM_AUTH_TOKEN="sesame", MAILROOM_URL="https://mailroom.temba.io"): - with patch("requests.post") as mock_post: - mock_post.return_value = MockJsonResponse(200, {"session": {}}) - response = self.client.post(url, payload, content_type="application/json") - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), {"session": {}}) - - # since this is an IVR flow, the session trigger will have a connection - self.assertEqual( - { - "call": { - "channel": {"uuid": "440099cf-200c-4d45-a8e7-4a564f4a0e8b", "name": "Test Channel"}, - "urn": "tel:+12065551212", - }, - "environment": { - "date_format": "DD-MM-YYYY", - "time_format": "tt:mm", - "timezone": "Africa/Kigali", - "allowed_languages": ["eng", "kin"], - "default_country": "RW", - "redaction_policy": "none", - "input_collation": "default", - }, - "user": {"email": "admin@nyaruka.com", "name": "Andy"}, - }, - json.loads(mock_post.call_args[1]["data"])["trigger"], - ) - - def test_simulation(self): - self.login(self.admin) - flow = self.get_flow("favorites") - - # create our payload - payload = dict(version=2, trigger={}, flow={}) - - url = reverse("flows.flow_simulate", args=[flow.pk]) - - with override_settings(MAILROOM_AUTH_TOKEN="sesame", MAILROOM_URL="https://mailroom.temba.io"): - with patch("requests.post") as mock_post: - mock_post.return_value = MockJsonResponse(400, {"session": {}}) - response = self.client.post(url, json.dumps(payload), content_type="application/json") - self.assertEqual(500, response.status_code) - - # start a flow - with patch("requests.post") as mock_post: - mock_post.return_value = MockJsonResponse(200, {"session": {}}) - response = self.client.post(url, json.dumps(payload), content_type="application/json") - self.assertEqual(200, response.status_code) - 
self.assertEqual({}, response.json()["session"]) - - actual_url = mock_post.call_args_list[0][0][0] - actual_payload = json.loads(mock_post.call_args_list[0][1]["data"]) - actual_headers = mock_post.call_args_list[0][1]["headers"] - - self.assertEqual(actual_url, "https://mailroom.temba.io/mr/sim/start") - self.assertEqual(actual_payload["org_id"], flow.org_id) - self.assertEqual(actual_payload["trigger"]["environment"]["date_format"], "DD-MM-YYYY") - self.assertEqual(len(actual_payload["assets"]["channels"]), 1) # fake channel - self.assertEqual(len(actual_payload["flows"]), 1) - self.assertEqual(actual_headers["Authorization"], "Token sesame") - self.assertEqual(actual_headers["Content-Type"], "application/json") - - # try a resume - payload = { - "version": 2, - "session": {"contact": {"fields": {"age": decimal.Decimal("39")}}}, - "resume": {}, - "flow": {}, - } - - with patch("requests.post") as mock_post: - mock_post.return_value = MockJsonResponse(400, {"session": {}}) - response = self.client.post(url, json.dumps(payload), content_type="application/json") - self.assertEqual(500, response.status_code) - - with patch("requests.post") as mock_post: - mock_post.return_value = MockJsonResponse(200, {"session": {}}) - response = self.client.post(url, json.dumps(payload), content_type="application/json") - self.assertEqual(200, response.status_code) - self.assertEqual({}, response.json()["session"]) - - actual_url = mock_post.call_args_list[0][0][0] - actual_payload = json.loads(mock_post.call_args_list[0][1]["data"]) - actual_headers = mock_post.call_args_list[0][1]["headers"] - - self.assertEqual(actual_url, "https://mailroom.temba.io/mr/sim/resume") - self.assertEqual(actual_payload["org_id"], flow.org_id) - self.assertEqual(actual_payload["resume"]["environment"]["date_format"], "DD-MM-YYYY") - self.assertEqual(len(actual_payload["assets"]["channels"]), 1) # fake channel - self.assertEqual(len(actual_payload["flows"]), 1) - self.assertEqual(actual_headers["Authorization"], "Token sesame") - self.assertEqual(actual_headers["Content-Type"], "application/json") - - -class FlowSessionCRUDLTest(TembaTest): - def test_session_json(self): - contact = self.create_contact("Bob", phone="+1234567890") - flow = self.get_flow("color_v13") - - session = MockSessionWriter(contact, flow).wait().save().session - - # normal users can't see session json - json_url = reverse("flows.flowsession_json", args=[session.uuid]) - response = self.client.get(json_url) - self.assertLoginRedirect(response) - - self.login(self.admin) - response = self.client.get(json_url) - self.assertLoginRedirect(response) - - # but logged in as a CS rep we can - self.login(self.customer_support, choose_org=self.org) - - response = self.client.get(json_url) - self.assertEqual(200, response.status_code) - - response_json = json.loads(response.content) - self.assertEqual("Nyaruka", response_json["_metadata"]["org"]) - self.assertEqual(session.uuid, response_json["uuid"]) - - # now try with an s3 session - s3.client().put_object( - Bucket="test-sessions", Key="c/session.json", Body=io.BytesIO(json.dumps(session.output).encode()) - ) - FlowSession.objects.filter(id=session.id).update( - output_url="http://minio:9000/test-sessions/c/session.json", output=None - ) - - # fetch our contact history - response = self.client.get(json_url) - self.assertEqual(200, response.status_code) - self.assertEqual("Nyaruka", response_json["_metadata"]["org"]) - self.assertEqual(session.uuid, response_json["uuid"]) - - -class FlowStartTest(TembaTest): - 
@mock_mailroom - def test_preview(self, mr_mocks): - flow = self.create_flow("Test") - contact1 = self.create_contact("Ann", phone="+1234567111") - contact2 = self.create_contact("Bob", phone="+1234567222") - doctors = self.create_group("Doctors", contacts=[contact1, contact2]) - - mr_mocks.flow_start_preview(query='group = "Doctors" AND status = "active"', total=100) - - query, total = FlowStart.preview( - flow, - include=mailroom.Inclusions(group_uuids=[str(doctors.uuid)]), - exclude=mailroom.Exclusions(non_active=True), - ) - - self.assertEqual('group = "Doctors" AND status = "active"', query) - self.assertEqual(100, total) - - -class FlowStartCRUDLTest(TembaTest, CRUDLTestMixin): - def test_list(self): - list_url = reverse("flows.flowstart_list") - - flow1 = self.create_flow("Test Flow 1") - flow2 = self.create_flow("Test 2") - - contact = self.create_contact("Bob", phone="+1234567890") - group = self.create_group("Testers", contacts=[contact]) - start1 = FlowStart.create(flow1, self.admin, contacts=[contact]) - start2 = FlowStart.create( - flow1, self.admin, query="name ~ Bob", start_type="A", exclusions={"started_previously": True} - ) - start3 = FlowStart.create(flow2, self.admin, groups=[group], start_type="Z", exclusions={"in_a_flow": True}) - - flow2.release(self.admin) - - FlowStartCount.objects.create(start=start3, count=1000) - FlowStartCount.objects.create(start=start3, count=234) - - other_org_flow = self.create_flow("Test", org=self.org2) - FlowStart.create(other_org_flow, self.admin2) - - self.assertRequestDisallowed(list_url, [None, self.agent]) - response = self.assertListFetch( - list_url, [self.user, self.editor, self.admin], context_objects=[start3, start2, start1] - ) - - self.assertContains(response, "Test Flow 1") - self.assertNotContains(response, "Test Flow 2") - self.assertContains(response, "A deleted flow") - self.assertContains(response, "was started by admin@nyaruka.com") - self.assertContains(response, "was started by an API call") - self.assertContains(response, "was started by Zapier") - self.assertContains(response, "Not in a flow") - self.assertContains(response, "1,234 runs") - - response = self.assertListFetch(list_url + "?type=manual", [self.admin], context_objects=[start1]) - self.assertTrue(response.context["filtered"]) - self.assertEqual(response.context["url_params"], "?type=manual&") - - -class AssetServerTest(TembaTest): - def test_languages(self): - self.login(self.admin) - response = self.client.get("/flow/assets/%d/1234/language/" % self.org.id) - self.assertEqual( - response.json(), {"results": [{"iso": "eng", "name": "English"}, {"iso": "kin", "name": "Kinyarwanda"}]} - ) - - -class SystemChecksTest(TembaTest): - def test_mailroom_url(self): - with override_settings(MAILROOM_URL="http://mailroom.io"): - self.assertEqual(len(mailroom_url(None)), 0) - - with override_settings(MAILROOM_URL=None): - self.assertEqual(mailroom_url(None)[0].msg, "No mailroom URL set, simulation will not be available") - - -class FlowRevisionTest(TembaTest): - def test_trim_revisions(self): - start = timezone.now() - - color = self.get_flow("color") - clinic = self.get_flow("the_clinic") - - revision = 100 - FlowRevision.objects.all().update(revision=revision) - - # create a single old clinic revision - FlowRevision.objects.create( - flow=clinic, - definition=dict(), - revision=99, - created_on=timezone.now() - timedelta(days=7), - created_by=self.admin, - ) - - # make a bunch of revisions for color on the same day - created = timezone.now().replace(hour=6) - 
timedelta(days=1) - for i in range(25): - revision -= 1 - created = created - timedelta(minutes=1) - FlowRevision.objects.create( - flow=color, definition=dict(), revision=revision, created_by=self.admin, created_on=created - ) - - # then for 5 days prior, make a few more - for i in range(5): - created = created - timedelta(days=1) - for i in range(10): - revision -= 1 - created = created - timedelta(minutes=1) - FlowRevision.objects.create( - flow=color, definition=dict(), revision=revision, created_by=self.admin, created_on=created - ) - - # trim our flow revisions, should be left with original (today), 25 from yesterday, 1 per day for 5 days = 31 - self.assertEqual(76, FlowRevision.objects.filter(flow=color).count()) - self.assertEqual(45, FlowRevision.trim(start)) - self.assertEqual(31, FlowRevision.objects.filter(flow=color).count()) - self.assertEqual( - 7, - FlowRevision.objects.filter(flow=color) - .annotate(created_date=TruncDate("created_on")) - .distinct("created_date") - .count(), - ) - - # trim our clinic flow manually, should remain unchanged - self.assertEqual(2, FlowRevision.objects.filter(flow=clinic).count()) - self.assertEqual(0, FlowRevision.trim_for_flow(clinic.id)) - self.assertEqual(2, FlowRevision.objects.filter(flow=clinic).count()) - - # call our task - trim_flow_revisions() - self.assertEqual(2, FlowRevision.objects.filter(flow=clinic).count()) - self.assertEqual(31, FlowRevision.objects.filter(flow=color).count()) - - # call again (testing reading redis key) - trim_flow_revisions() - self.assertEqual(2, FlowRevision.objects.filter(flow=clinic).count()) - self.assertEqual(31, FlowRevision.objects.filter(flow=color).count()) diff --git a/temba/flows/tests/__init__.py b/temba/flows/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/temba/flows/tests/test_counts.py b/temba/flows/tests/test_counts.py new file mode 100644 index 00000000000..df8b558b056 --- /dev/null +++ b/temba/flows/tests/test_counts.py @@ -0,0 +1,271 @@ +from datetime import date, timedelta, timezone as tzone + +from django.db import connection +from django.utils import timezone + +from temba.flows.models import FlowActivityCount, FlowRun, FlowSession +from temba.flows.tasks import squash_activity_counts +from temba.tests import TembaTest +from temba.utils.uuid import uuid4 + + +class FlowActivityCountTest(TembaTest): + def test_node_counts(self): + flow = self.create_flow("Test 1") + contact = self.create_contact("Bob", phone="+1234567890") + session = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=contact, + status=FlowSession.STATUS_WAITING, + output_url="http://sessions.com/123.json", + created_on=timezone.now(), + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + + def create_run(status, node_uuid): + return FlowRun.objects.create( + uuid=uuid4(), + org=self.org, + session=session, + flow=flow, + contact=contact, + status=status, + created_on=timezone.now(), + modified_on=timezone.now(), + exited_on=timezone.now() if status not in ("A", "W") else None, + current_node_uuid=node_uuid, + ) + + run1 = create_run(FlowRun.STATUS_ACTIVE, "ebb534e1-e2e0-40e9-8652-d195e87d832b") + run2 = create_run(FlowRun.STATUS_WAITING, "ebb534e1-e2e0-40e9-8652-d195e87d832b") + run3 = create_run(FlowRun.STATUS_WAITING, "bbb71aab-e026-442e-9971-6bc4f48941fb") + create_run(FlowRun.STATUS_INTERRUPTED, "bbb71aab-e026-442e-9971-6bc4f48941fb") + + self.assertEqual( + 
{"node:ebb534e1-e2e0-40e9-8652-d195e87d832b": 2, "node:bbb71aab-e026-442e-9971-6bc4f48941fb": 1}, + flow.counts.prefix("node:").scope_totals(), + ) + + run1.status = FlowRun.STATUS_EXPIRED + run1.exited_on = timezone.now() + run1.save(update_fields=("status", "exited_on")) + + run3.current_node_uuid = "85b0c928-4bd9-4a2e-84b2-164802c32486" + run3.save(update_fields=("current_node_uuid",)) + + self.assertEqual( + { + "node:ebb534e1-e2e0-40e9-8652-d195e87d832b": 1, + "node:bbb71aab-e026-442e-9971-6bc4f48941fb": 0, + "node:85b0c928-4bd9-4a2e-84b2-164802c32486": 1, + }, + flow.counts.prefix("node:").scope_totals(), + ) + + run2.delete() + + self.assertEqual( + { + "node:ebb534e1-e2e0-40e9-8652-d195e87d832b": 0, + "node:bbb71aab-e026-442e-9971-6bc4f48941fb": 0, + "node:85b0c928-4bd9-4a2e-84b2-164802c32486": 1, + }, + flow.counts.prefix("node:").scope_totals(), + ) + + def test_status_counts(self): + contact = self.create_contact("Bob", phone="+1234567890") + session = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=contact, + status=FlowSession.STATUS_WAITING, + output_url="http://sessions.com/123.json", + created_on=timezone.now(), + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + + def create_runs(flow_status_pairs: tuple) -> list: + runs = [] + for flow, status in flow_status_pairs: + runs.append( + FlowRun( + uuid=uuid4(), + org=self.org, + session=session, + flow=flow, + contact=contact, + status=status, + created_on=timezone.now(), + modified_on=timezone.now(), + exited_on=timezone.now() if status not in ("A", "W") else None, + ) + ) + return FlowRun.objects.bulk_create(runs) + + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + + runs1 = create_runs( + ( + (flow1, FlowRun.STATUS_ACTIVE), + (flow2, FlowRun.STATUS_WAITING), + (flow1, FlowRun.STATUS_ACTIVE), + (flow2, FlowRun.STATUS_WAITING), + (flow1, FlowRun.STATUS_WAITING), + (flow1, FlowRun.STATUS_COMPLETED), + ) + ) + + self.assertEqual( + {(flow1, "status:A"): 2, (flow2, "status:W"): 2, (flow1, "status:W"): 1, (flow1, "status:C"): 1}, + {(c.flow, c.scope): c.count for c in FlowActivityCount.objects.all()}, + ) + self.assertEqual({"status:A": 2, "status:W": 1, "status:C": 1}, flow1.counts.scope_totals()) + self.assertEqual({"status:W": 2}, flow2.counts.scope_totals()) + + # no difference after squashing + squash_activity_counts() + + self.assertEqual({"status:A": 2, "status:W": 1, "status:C": 1}, flow1.counts.scope_totals()) + self.assertEqual({"status:W": 2}, flow2.counts.scope_totals()) + + runs2 = create_runs( + ( + (flow1, FlowRun.STATUS_ACTIVE), + (flow1, FlowRun.STATUS_ACTIVE), + (flow2, FlowRun.STATUS_EXPIRED), + ) + ) + + self.assertEqual({"status:A": 4, "status:W": 1, "status:C": 1}, flow1.counts.scope_totals()) + self.assertEqual({"status:W": 2, "status:X": 1}, flow2.counts.scope_totals()) + + # bulk update runs like they're being interrupted + FlowRun.objects.filter(id__in=[r.id for r in runs1]).update( + status=FlowRun.STATUS_INTERRUPTED, exited_on=timezone.now() + ) + + self.assertEqual({"status:A": 2, "status:W": 0, "status:C": 0, "status:I": 4}, flow1.counts.scope_totals()) + self.assertEqual({"status:W": 0, "status:X": 1, "status:I": 2}, flow2.counts.scope_totals()) + + # no difference after squashing except zeros gone + squash_activity_counts() + + self.assertEqual({"status:A": 2, "status:I": 4}, flow1.counts.scope_totals()) + self.assertEqual({"status:X": 1, "status:I": 2}, 
flow2.counts.scope_totals()) + + # do manual deletion of some runs + FlowRun.objects.filter(id__in=[r.id for r in runs2]).update(delete_from_results=True) + FlowRun.objects.filter(id__in=[r.id for r in runs2]).delete() + + self.assertEqual({"status:A": 0, "status:I": 4}, flow1.counts.scope_totals()) + self.assertEqual({"status:X": 0, "status:I": 2}, flow2.counts.scope_totals()) + + # do archival deletion of the rest + FlowRun.objects.filter(id__in=[r.id for r in runs1]).delete() + + # status counts are unchanged + self.assertEqual({"status:A": 0, "status:I": 4}, flow1.counts.scope_totals()) + self.assertEqual({"status:X": 0, "status:I": 2}, flow2.counts.scope_totals()) + + def test_msgsin_counts(self): + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + + def handle(msg, flow): + msg.status = "H" + msg.flow = flow + msg.save(update_fields=("status", "flow")) + + contact = self.create_contact("Bob", phone="+1234567890") + self.create_outgoing_msg(contact, "Out") # should be ignored + in1 = self.create_incoming_msg(contact, "In 1", status="P") + in2 = self.create_incoming_msg(contact, "In 2", status="P") + in3 = self.create_incoming_msg(contact, "In 3", status="P") + + self.assertEqual(0, flow1.counts.count()) + self.assertEqual(0, flow2.counts.count()) + + handle(in1, flow1) + handle(in2, flow1) + handle(in3, flow2) + + self.assertEqual(6, flow1.counts.count()) + self.assertEqual(3, flow2.counts.count()) + + today = date.today().isoformat() # date as YYYY-MM-DD + dow = date.today().isoweekday() # weekday as 1(Mon)-7(Sun) + hour = timezone.now().astimezone(tzone.utc).hour + + self.assertEqual( + {f"msgsin:date:{today}": 2, f"msgsin:dow:{dow}": 2, f"msgsin:hour:{hour}": 2}, + flow1.counts.filter(scope__startswith="msgsin:").scope_totals(), + ) + self.assertEqual( + {f"msgsin:date:{today}": 1, f"msgsin:dow:{dow}": 1, f"msgsin:hour:{hour}": 1}, + flow2.counts.filter(scope__startswith="msgsin:").scope_totals(), + ) + + # other changes to msgs shouldn't create new counts + in1.archive() + in2.archive() + + self.assertEqual(6, flow1.counts.count()) + self.assertEqual(3, flow2.counts.count()) + + def test_squashing(self): + flow1 = self.create_flow("Test 1") + flow1.counts.create(scope="foo:1", count=1) + flow1.counts.create(scope="foo:1", count=2) + flow1.counts.create(scope="foo:2", count=4) + flow1.counts.create(scope="foo:3", count=-6) + flow1.counts.create(scope="foo:3", count=-1) + + flow2 = self.create_flow("Test 2") + flow2.counts.create(scope="foo:1", count=7) + flow2.counts.create(scope="foo:1", count=3) + flow2.counts.create(scope="foo:2", count=8) # unsquashed that sum to zero + flow2.counts.create(scope="foo:2", count=-8) + flow2.counts.create(scope="foo:3", count=5) + + self.assertEqual(3, flow1.counts.filter(scope="foo:1").sum()) + self.assertEqual(4, flow1.counts.filter(scope="foo:2").sum()) + self.assertEqual(-7, flow1.counts.filter(scope="foo:3").sum()) # negative counts supported + self.assertEqual(0, flow1.counts.filter(scope="foo:4").sum()) # zero if no such scope exists + self.assertEqual(10, flow2.counts.filter(scope="foo:1").sum()) + self.assertEqual(0, flow2.counts.filter(scope="foo:2").sum()) + self.assertEqual(5, flow2.counts.filter(scope="foo:3").sum()) + + squash_activity_counts() + + self.assertEqual({"foo:1", "foo:2", "foo:3"}, set(flow1.counts.values_list("scope", flat=True))) + + # flow2/foo:2 should be gone because it squashed to zero + self.assertEqual({"foo:1", "foo:3"}, set(flow2.counts.values_list("scope", flat=True))) + + 
self.assertEqual(3, flow1.counts.filter(scope="foo:1").sum())
+        self.assertEqual(4, flow1.counts.filter(scope="foo:2").sum())
+        self.assertEqual(-7, flow1.counts.filter(scope="foo:3").sum())
+        self.assertEqual(10, flow2.counts.filter(scope="foo:1").sum())
+        self.assertEqual(0, flow2.counts.filter(scope="foo:2").sum())
+        self.assertEqual(5, flow2.counts.filter(scope="foo:3").sum())
+
+        flow2.counts.create(scope="foo:3", count=-5)  # unsquashed zero + squashed zero
+
+        squash_activity_counts()
+
+        # flow2/foo:3 should be gone because it squashed to zero
+        self.assertEqual({"foo:1"}, set(flow2.counts.values_list("scope", flat=True)))
+
+        # test that model being asked to squash a set that matches no rows doesn't insert anything
+        with connection.cursor() as cursor:
+            sql, params = FlowActivityCount.get_squash_query({"flow_id": flow1.id, "scope": "foo:9"})
+            cursor.execute(sql, params)
+
+        self.assertEqual({"foo:1", "foo:2", "foo:3"}, set(flow1.counts.values_list("scope", flat=True)))
diff --git a/temba/flows/tests/test_export.py b/temba/flows/tests/test_export.py
new file mode 100644
index 00000000000..10280491175
--- /dev/null
+++ b/temba/flows/tests/test_export.py
@@ -0,0 +1,1156 @@
+from datetime import timedelta
+from unittest.mock import patch
+
+from openpyxl import load_workbook
+
+from django.core.files.storage import default_storage
+from django.utils import timezone
+
+from temba.archives.models import Archive
+from temba.contacts.models import Contact, ContactURN
+from temba.flows.models import Flow, FlowRun, ResultsExport
+from temba.orgs.models import Export
+from temba.tests import TembaTest, mock_mailroom
+from temba.tests.engine import MockSessionWriter
+from temba.utils import json
+from temba.utils.uuid import uuid4
+
+
+class ResultsExportTest(TembaTest):
+    def setUp(self):
+        super().setUp()
+
+        self.contact = self.create_contact("Eric", phone="+250788382382")
+        self.contact2 = self.create_contact("Nic", phone="+250788383383")
+        self.contact3 = self.create_contact("Norbert", phone="+250788123456")
+
+    def _export(
+        self,
+        flow,
+        start_date,
+        end_date,
+        responded_only=False,
+        with_fields=(),
+        with_groups=(),
+        extra_urns=(),
+        has_results=True,
+    ):
+        """
+        Exports results for the given flow and returns the generated workbook
+        """
+
+        readonly_models = {FlowRun}
+        if has_results:
+            readonly_models.add(Contact)
+            readonly_models.add(ContactURN)
+
+        export = ResultsExport.create(
+            self.org,
+            self.admin,
+            start_date,
+            end_date,
+            flows=[flow],
+            with_fields=with_fields,
+            with_groups=with_groups,
+            responded_only=responded_only,
+            extra_urns=extra_urns,
+        )
+
+        with self.mockReadOnly(assert_models=readonly_models):
+            export.perform()
+
+        return load_workbook(filename=default_storage.open(f"orgs/{self.org.id}/results_exports/{export.uuid}.xlsx"))
+
+    @mock_mailroom
+    def test_export(self, mr_mocks):
+        today = timezone.now().astimezone(self.org.timezone).date()
+
+        flow = self.get_flow("color_v13")
+        flow_nodes = flow.get_definition()["nodes"]
+        color_prompt = flow_nodes[0]
+        color_split = flow_nodes[4]
+        color_other = flow_nodes[3]
+        orange_reply = flow_nodes[1]
+
+        # add a spec for a hidden result to this flow
+        flow.metadata[Flow.METADATA_RESULTS].append(
+            {
+                "key": "_color_classification",
+                "name": "_Color Classification",
+                "categories": ["Success", "Skipped", "Failure"],
+                "node_uuids": [color_split["uuid"]],
+            }
+        )
+
+        age = self.create_field("age", "Age")
+        devs = self.create_group("Devs", [self.contact])
+
+        mods = self.contact.update_fields({age: "36"})
+
mods += self.contact.update_urns(["tel:+250788382382", "twitter:erictweets"]) + self.contact.modify(self.admin, mods) + + # contact name with an illegal character + self.contact3.name = "Nor\02bert" + self.contact3.save(update_fields=("name",)) + + contact3_run1 = ( + MockSessionWriter(self.contact3, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + contact1_in1 = self.create_incoming_msg(self.contact, "light beige") + contact1_in2 = self.create_incoming_msg(self.contact, "orange") + contact1_run1 = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact1_in1) + .set_result("Color", "light beige", "Other", "light beige") + .visit(color_other) + .send_msg("That is a funny color. Try again.", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact1_in2) + .set_result("Color", "orange", "Orange", "orange") + .set_result("_Color Classification", "orange", "Success", "color_selection") # hidden result + .visit(orange_reply) + .send_msg( + "I love orange too! You said: orange which is category: Orange You are: 0788 382 382 SMS: orange Flow: color: orange", + self.channel, + ) + .complete() + .save() + ).session.runs.get() + + contact2_in1 = self.create_incoming_msg(self.contact2, "green") + contact2_run1 = ( + MockSessionWriter(self.contact2, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact2_in1) + .set_result("Color", "green", "Other", "green") + .visit(color_other) + .send_msg("That is a funny color. Try again.", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + contact2_run2 = ( + MockSessionWriter(self.contact2, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + contact1_in3 = self.create_incoming_msg(self.contact, " blue ") + contact1_run2 = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact1_in3) + .set_result("Color", "blue", "Blue", " blue ") + .visit(orange_reply) + .send_msg("Blue is sad. :(", self.channel) + .complete() + .save() + ).session.runs.get() + + for run in (contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2): + run.refresh_from_db() + + with self.assertNumQueries(23): + workbook = self._export( + flow, + start_date=today - timedelta(days=7), + end_date=today, + with_groups=[devs], + ) + + # check that notifications were created + export = Export.objects.filter(export_type=ResultsExport.slug).order_by("id").last() + self.assertEqual(1, self.admin.notifications.filter(notification_type="export:finished", export=export).count()) + + tz = self.org.timezone + + (sheet_runs,) = workbook.worksheets + + # check runs sheet... 
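+        # 12 columns = 9 contact/run metadata columns + category/value/text for the visible Color result
+        # (the hidden "_color_classification" result should not get any columns)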
+ self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs + self.assertEqual(12, len(list(sheet_runs.columns))) + + self.assertExcelRow( + sheet_runs, + 0, + [ + "Contact UUID", + "Contact Name", + "URN Scheme", + "URN Value", + "Group:Devs", + "Started", + "Modified", + "Exited", + "Run UUID", + "Color (Category) - Colors", + "Color (Value) - Colors", + "Color (Text) - Colors", + ], + ) + + self.assertExcelRow( + sheet_runs, + 1, + [ + contact3_run1.contact.uuid, + "Norbert", + "tel", + "+250788123456", + False, + contact3_run1.created_on, + contact3_run1.modified_on, + "", + contact3_run1.uuid, + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 2, + [ + contact1_run1.contact.uuid, + "Eric", + "tel", + "+250788382382", + True, + contact1_run1.created_on, + contact1_run1.modified_on, + contact1_run1.exited_on, + contact1_run1.uuid, + "Orange", + "orange", + "orange", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 3, + [ + contact2_run1.contact.uuid, + "Nic", + "tel", + "+250788383383", + False, + contact2_run1.created_on, + contact2_run1.modified_on, + contact2_run1.exited_on, + contact2_run1.uuid, + "Other", + "green", + "green", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 4, + [ + contact2_run2.contact.uuid, + "Nic", + "tel", + "+250788383383", + False, + contact2_run2.created_on, + contact2_run2.modified_on, + "", + contact2_run2.uuid, + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 5, + [ + contact1_run2.contact.uuid, + "Eric", + "tel", + "+250788382382", + True, + contact1_run2.created_on, + contact1_run2.modified_on, + contact1_run2.exited_on, + contact1_run2.uuid, + "Blue", + "blue", + " blue ", + ], + tz, + ) + + # test without unresponded + with self.assertNumQueries(21): + workbook = self._export( + flow, + start_date=today - timedelta(days=7), + end_date=today, + responded_only=True, + with_groups=(devs,), + ) + + tz = self.org.timezone + sheet_runs = workbook.worksheets[0] + + self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs + self.assertEqual(12, len(list(sheet_runs.columns))) + + self.assertExcelRow( + sheet_runs, + 0, + [ + "Contact UUID", + "Contact Name", + "URN Scheme", + "URN Value", + "Group:Devs", + "Started", + "Modified", + "Exited", + "Run UUID", + "Color (Category) - Colors", + "Color (Value) - Colors", + "Color (Text) - Colors", + ], + ) + + self.assertExcelRow( + sheet_runs, + 1, + [ + contact1_run1.contact.uuid, + "Eric", + "tel", + "+250788382382", + True, + contact1_run1.created_on, + contact1_run1.modified_on, + contact1_run1.exited_on, + contact1_run1.uuid, + "Orange", + "orange", + "orange", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 2, + [ + contact2_run1.contact.uuid, + "Nic", + "tel", + "+250788383383", + False, + contact2_run1.created_on, + contact2_run1.modified_on, + contact2_run1.exited_on, + contact2_run1.uuid, + "Other", + "green", + "green", + ], + tz, + ) + + # test export with a contact field + with self.assertNumQueries(25): + workbook = self._export( + flow, + start_date=today - timedelta(days=7), + end_date=today, + with_fields=[age], + with_groups=[devs], + responded_only=True, + extra_urns=["twitter", "line"], + ) + + tz = self.org.timezone + (sheet_runs,) = workbook.worksheets + + # check runs sheet... 
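+        # 15 columns = the previous 12 plus Field:Age and the two extra URN columns (Twitter and Line)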
+ self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs + self.assertEqual(15, len(list(sheet_runs.columns))) + + self.assertExcelRow( + sheet_runs, + 0, + [ + "Contact UUID", + "Contact Name", + "URN Scheme", + "URN Value", + "Field:Age", + "Group:Devs", + "URN:Twitter", + "URN:Line", + "Started", + "Modified", + "Exited", + "Run UUID", + "Color (Category) - Colors", + "Color (Value) - Colors", + "Color (Text) - Colors", + ], + ) + + self.assertExcelRow( + sheet_runs, + 1, + [ + contact1_run1.contact.uuid, + "Eric", + "tel", + "+250788382382", + "36", + True, + "erictweets", + "", + contact1_run1.created_on, + contact1_run1.modified_on, + contact1_run1.exited_on, + contact1_run1.uuid, + "Orange", + "orange", + "orange", + ], + tz, + ) + + # test that we don't exceed the limit on rows per sheet + with patch("temba.utils.export.MultiSheetExporter.MAX_EXCEL_ROWS", 4): + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) + expected_sheets = [("Runs 1", 4), ("Runs 2", 3)] + + for s, sheet in enumerate(workbook.worksheets): + self.assertEqual((sheet.title, len(list(sheet.rows))), expected_sheets[s]) + + # test we can export archived flows + flow.is_archived = True + flow.save() + + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) + + (sheet_runs,) = workbook.worksheets + + # check runs sheet... + self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs + self.assertEqual(11, len(list(sheet_runs.columns))) + + def test_anon_org(self): + today = timezone.now().astimezone(self.org.timezone).date() + + with self.anonymous(self.org): + flow = self.get_flow("color_v13") + flow_nodes = flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[4] + + msg_in = self.create_incoming_msg(self.contact, "orange") + + run1 = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=msg_in) + .set_result("Color", "orange", "Orange", "orange") + .send_msg("I love orange too!", self.channel) + .complete() + .save() + ).session.runs.get() + + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) + self.assertEqual(1, len(workbook.worksheets)) + sheet_runs = workbook.worksheets[0] + self.assertExcelRow( + sheet_runs, + 0, + [ + "Contact UUID", + "Contact Name", + "URN Scheme", + "Anon Value", + "Started", + "Modified", + "Exited", + "Run UUID", + "Color (Category) - Colors", + "Color (Value) - Colors", + "Color (Text) - Colors", + ], + ) + + self.assertExcelRow( + sheet_runs, + 1, + [ + self.contact.uuid, + "Eric", + "tel", + self.contact.anon_display, + run1.created_on, + run1.modified_on, + run1.exited_on, + run1.uuid, + "Orange", + "orange", + "orange", + ], + self.org.timezone, + ) + + def test_broadcast_only_flow(self): + flow = self.get_flow("send_only_v13") + send_node = flow.get_definition()["nodes"][0] + today = timezone.now().astimezone(self.org.timezone).date() + + for contact in [self.contact, self.contact2, self.contact3]: + ( + MockSessionWriter(contact, flow) + .visit(send_node) + .send_msg("This is the first message.", self.channel) + .send_msg("This is the second message.", self.channel) + .complete() + .save() + ).session.runs.get() + + for contact in [self.contact, self.contact2]: + ( + MockSessionWriter(contact, flow) + .visit(send_node) + .send_msg("This is the first message.", self.channel) + .send_msg("This is the second message.", 
self.channel)
+            .complete()
+            .save()
+        ).session.runs.get()
+
+        contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2 = FlowRun.objects.order_by("id")
+
+        with self.assertNumQueries(17):
+            workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today)
+
+        tz = self.org.timezone
+
+        (sheet_runs,) = workbook.worksheets
+
+        # check runs sheet...
+        self.assertEqual(6, len(list(sheet_runs.rows)))  # header + 5 runs
+        self.assertEqual(8, len(list(sheet_runs.columns)))
+
+        self.assertExcelRow(
+            sheet_runs,
+            0,
+            ["Contact UUID", "Contact Name", "URN Scheme", "URN Value", "Started", "Modified", "Exited", "Run UUID"],
+        )
+
+        self.assertExcelRow(
+            sheet_runs,
+            1,
+            [
+                contact1_run1.contact.uuid,
+                "Eric",
+                "tel",
+                "+250788382382",
+                contact1_run1.created_on,
+                contact1_run1.modified_on,
+                contact1_run1.exited_on,
+                contact1_run1.uuid,
+            ],
+            tz,
+        )
+        self.assertExcelRow(
+            sheet_runs,
+            2,
+            [
+                contact2_run1.contact.uuid,
+                "Nic",
+                "tel",
+                "+250788383383",
+                contact2_run1.created_on,
+                contact2_run1.modified_on,
+                contact2_run1.exited_on,
+                contact2_run1.uuid,
+            ],
+            tz,
+        )
+        self.assertExcelRow(
+            sheet_runs,
+            3,
+            [
+                contact3_run1.contact.uuid,
+                "Norbert",
+                "tel",
+                "+250788123456",
+                contact3_run1.created_on,
+                contact3_run1.modified_on,
+                contact3_run1.exited_on,
+                contact3_run1.uuid,
+            ],
+            tz,
+        )
+        self.assertExcelRow(
+            sheet_runs,
+            4,
+            [
+                contact1_run2.contact.uuid,
+                "Eric",
+                "tel",
+                "+250788382382",
+                contact1_run2.created_on,
+                contact1_run2.modified_on,
+                contact1_run2.exited_on,
+                contact1_run2.uuid,
+            ],
+            tz,
+        )
+        self.assertExcelRow(
+            sheet_runs,
+            5,
+            [
+                contact2_run2.contact.uuid,
+                "Nic",
+                "tel",
+                "+250788383383",
+                contact2_run2.created_on,
+                contact2_run2.modified_on,
+                contact2_run2.exited_on,
+                contact2_run2.uuid,
+            ],
+            tz,
+        )
+
+        # test without unresponded
+        with self.assertNumQueries(10):
+            workbook = self._export(
+                flow,
+                start_date=today - timedelta(days=7),
+                end_date=today,
+                responded_only=True,
+                has_results=False,
+            )
+
+        (sheet_runs,) = workbook.worksheets
+
+        self.assertEqual(1, len(list(sheet_runs.rows)))  # header; no responses to a broadcast only flow
+        self.assertEqual(8, len(list(sheet_runs.columns)))
+
+        self.assertExcelRow(
+            sheet_runs,
+            0,
+            ["Contact UUID", "Contact Name", "URN Scheme", "URN Value", "Started", "Modified", "Exited", "Run UUID"],
+        )
+
+    def test_replaced_rulesets(self):
+        today = timezone.now().astimezone(self.org.timezone).date()
+
+        favorites = self.get_flow("favorites_v13")
+        flow_json = favorites.get_definition()
+        flow_nodes = flow_json["nodes"]
+        color_prompt = flow_nodes[0]
+        color_other = flow_nodes[1]
+        color_split = flow_nodes[2]
+        beer_prompt = flow_nodes[3]
+        beer_split = flow_nodes[5]
+
+        contact3_run1 = (
+            MockSessionWriter(self.contact3, favorites)
+            .visit(color_prompt)
+            .send_msg("What is your favorite color?", self.channel)
+            .visit(color_split)
+            .wait()
+            .save()
+        ).session.runs.get()
+
+        contact1_in1 = self.create_incoming_msg(self.contact, "light beige")
+        contact1_in2 = self.create_incoming_msg(self.contact, "red")
+        contact1_run1 = (
+            MockSessionWriter(self.contact, favorites)
+            .visit(color_prompt)
+            .send_msg("What is your favorite color?", self.channel)
+            .visit(color_split)
+            .wait()
+            .resume(msg=contact1_in1)
+            .set_result("Color", "light beige", "Other", "light beige")
+            .visit(color_other)
+            .send_msg("I don't know that color. 
Try again.", self.channel) + .visit(color_split) + .wait() + .save() + .resume(msg=contact1_in2) + .set_result("Color", "red", "Red", "red") + .visit(beer_prompt) + .send_msg("Good choice, I like Red too! What is your favorite beer?", self.channel) + .visit(beer_split) + .complete() + .save() + ).session.runs.get() + + devs = self.create_group("Devs", [self.contact]) + + # now remap the uuid for our color + flow_json = json.loads(json.dumps(flow_json).replace(color_split["uuid"], str(uuid4()))) + favorites.save_revision(self.admin, flow_json) + flow_nodes = flow_json["nodes"] + color_prompt = flow_nodes[0] + color_other = flow_nodes[1] + color_split = flow_nodes[2] + + contact2_in1 = self.create_incoming_msg(self.contact2, "green") + contact2_run1 = ( + MockSessionWriter(self.contact2, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact2_in1) + .set_result("Color", "green", "Green", "green") + .visit(beer_prompt) + .send_msg("Good choice, I like Green too! What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .save() + ).session.runs.get() + + contact2_run2 = ( + MockSessionWriter(self.contact2, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + contact1_in3 = self.create_incoming_msg(self.contact, " blue ") + contact1_run2 = ( + MockSessionWriter(self.contact, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact1_in3) + .set_result("Color", "blue", "Blue", " blue ") + .visit(beer_prompt) + .send_msg("Good choice, I like Blue too! What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .save() + ).session.runs.get() + + for run in (contact1_run1, contact2_run1, contact3_run1, contact1_run2, contact2_run2): + run.refresh_from_db() + + workbook = self._export(favorites, start_date=today - timedelta(days=7), end_date=today, with_groups=[devs]) + + tz = self.org.timezone + + (sheet_runs,) = workbook.worksheets + + # check runs sheet... 
+ self.assertEqual(6, len(list(sheet_runs.rows))) # header + 5 runs + self.assertEqual(18, len(list(sheet_runs.columns))) + + self.assertExcelRow( + sheet_runs, + 0, + [ + "Contact UUID", + "Contact Name", + "URN Scheme", + "URN Value", + "Group:Devs", + "Started", + "Modified", + "Exited", + "Run UUID", + "Color (Category) - Favorites", + "Color (Value) - Favorites", + "Color (Text) - Favorites", + "Beer (Category) - Favorites", + "Beer (Value) - Favorites", + "Beer (Text) - Favorites", + "Name (Category) - Favorites", + "Name (Value) - Favorites", + "Name (Text) - Favorites", + ], + ) + + self.assertExcelRow( + sheet_runs, + 1, + [ + contact3_run1.contact.uuid, + "Norbert", + "tel", + "+250788123456", + False, + contact3_run1.created_on, + contact3_run1.modified_on, + "", + contact3_run1.uuid, + "", + "", + "", + "", + "", + "", + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 2, + [ + contact1_run1.contact.uuid, + "Eric", + "tel", + "+250788382382", + True, + contact1_run1.created_on, + contact1_run1.modified_on, + contact1_run1.exited_on, + contact1_run1.uuid, + "Red", + "red", + "red", + "", + "", + "", + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 3, + [ + contact2_run1.contact.uuid, + "Nic", + "tel", + "+250788383383", + False, + contact2_run1.created_on, + contact2_run1.modified_on, + contact2_run1.exited_on, + contact2_run1.uuid, + "Green", + "green", + "green", + "", + "", + "", + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 4, + [ + contact2_run2.contact.uuid, + "Nic", + "tel", + "+250788383383", + False, + contact2_run2.created_on, + contact2_run2.modified_on, + "", + contact2_run2.uuid, + "", + "", + "", + "", + "", + "", + "", + "", + "", + ], + tz, + ) + + self.assertExcelRow( + sheet_runs, + 5, + [ + contact1_run2.contact.uuid, + "Eric", + "tel", + "+250788382382", + True, + contact1_run2.created_on, + contact1_run2.modified_on, + "", + contact1_run2.uuid, + "Blue", + "blue", + " blue ", + "", + "", + "", + "", + "", + "", + ], + tz, + ) + + def test_remove_control_characters(self): + today = timezone.now().astimezone(self.org.timezone).date() + + flow = self.get_flow("color_v13") + flow_nodes = flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[4] + color_other = flow_nodes[3] + + msg_in = self.create_incoming_msg(self.contact, "ngert\x07in.") + + run1 = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=msg_in) + .set_result("Color", "ngert\x07in.", "Other", "ngert\x07in.") + .visit(color_other) + .send_msg("That is a funny color. 
Try again.", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) + tz = self.org.timezone + (sheet_runs,) = workbook.worksheets + + self.assertExcelRow( + sheet_runs, + 1, + [ + run1.contact.uuid, + "Eric", + "tel", + "+250788382382", + run1.created_on, + run1.modified_on, + "", + run1.uuid, + "Other", + "ngertin.", + "ngertin.", + ], + tz, + ) + + def test_from_archives(self): + today = timezone.now().astimezone(self.org.timezone).date() + + flow = self.get_flow("color_v13") + flow_nodes = flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[4] + color_other = flow_nodes[3] + blue_reply = flow_nodes[2] + + contact1_in1 = self.create_incoming_msg(self.contact, "green") + contact1_run = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact1_in1) + .set_result("Color", "green", "Other", "green") + .visit(color_other) + .send_msg("That is a funny color. Try again.", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + contact2_in1 = self.create_incoming_msg(self.contact2, "blue") + contact2_run = ( + MockSessionWriter(self.contact2, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=contact2_in1) + .set_result("Color", "blue", "Blue", "blue") + .visit(blue_reply) + .send_msg("Blue is sad :(.", self.channel) + .complete() + .save() + ).session.runs.get() + + # and a run for a different flow + flow2 = self.get_flow("favorites_v13") + flow2_nodes = flow2.get_definition()["nodes"] + + contact2_other_flow = ( + MockSessionWriter(self.contact2, flow2) + .visit(flow2_nodes[0]) + .send_msg("Color???", self.channel) + .visit(flow2_nodes[2]) + .wait() + .save() + ).session.runs.get() + + contact3_run = ( + MockSessionWriter(self.contact3, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .save() + ).session.runs.get() + + # we now have 4 runs in this order of modified_on + contact1_run.refresh_from_db() + contact2_run.refresh_from_db() + contact2_other_flow.refresh_from_db() + contact3_run.refresh_from_db() + + # archive the first 3 runs, using 'old' archive format that used a list of values for one of them + old_archive_format = contact2_run.as_archive_json() + old_archive_format["values"] = [old_archive_format["values"]] + + self.create_archive( + Archive.TYPE_FLOWRUN, + "D", + timezone.now().date(), + [contact1_run.as_archive_json(), old_archive_format, contact2_other_flow.as_archive_json()], + ) + + contact1_run.delete() + contact2_run.delete() + + # create an archive earlier than our flow created date so we check that it isn't included + self.create_archive( + Archive.TYPE_FLOWRUN, + "D", + timezone.now().date() - timedelta(days=2), + [contact2_run.as_archive_json()], + ) + + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today) + + tz = self.org.timezone + (sheet_runs,) = workbook.worksheets + + # check runs sheet... 
+ self.assertEqual(4, len(list(sheet_runs.rows))) # header + 3 runs + + self.assertExcelRow( + sheet_runs, + 1, + [ + contact1_run.contact.uuid, + "Eric", + "tel", + "+250788382382", + contact1_run.created_on, + contact1_run.modified_on, + "", + contact1_run.uuid, + "Other", + "green", + "green", + ], + tz, + ) + self.assertExcelRow( + sheet_runs, + 2, + [ + contact2_run.contact.uuid, + "Nic", + "tel", + "+250788383383", + contact2_run.created_on, + contact2_run.modified_on, + contact2_run.exited_on, + contact2_run.uuid, + "Blue", + "blue", + "blue", + ], + tz, + ) + self.assertExcelRow( + sheet_runs, + 3, + [ + contact3_run.contact.uuid, + "Norbert", + "tel", + "+250788123456", + contact3_run.created_on, + contact3_run.modified_on, + "", + contact3_run.uuid, + "", + "", + "", + ], + tz, + ) + + def test_no_responses(self): + today = timezone.now().astimezone(self.org.timezone).date() + flow = self.create_flow("Test") + + self.assertEqual(flow.get_run_stats()["total"], 0) + + workbook = self._export(flow, start_date=today - timedelta(days=7), end_date=today, has_results=False) + + self.assertEqual(len(workbook.worksheets), 1) + + # every sheet has only the head row + self.assertEqual(1, len(list(workbook.worksheets[0].rows))) + self.assertEqual(8, len(list(workbook.worksheets[0].columns))) diff --git a/temba/flows/tests/test_flow.py b/temba/flows/tests/test_flow.py new file mode 100644 index 00000000000..7b8555f4feb --- /dev/null +++ b/temba/flows/tests/test_flow.py @@ -0,0 +1,939 @@ +from datetime import datetime, timezone as tzone +from unittest.mock import patch + +from django.urls import reverse +from django.utils import timezone + +from temba.api.models import Resthook +from temba.campaigns.models import Campaign, CampaignEvent +from temba.classifiers.models import Classifier +from temba.contacts.models import URN, ContactField, ContactGroup +from temba.flows.models import ( + Flow, + FlowCategoryCount, + FlowRun, + FlowSession, + FlowStart, + FlowStartCount, + FlowUserConflictException, + FlowVersionConflictException, +) +from temba.flows.tasks import squash_flow_counts, update_session_wait_expires +from temba.globals.models import Global +from temba.orgs.integrations.dtone import DTOneType +from temba.tests import CRUDLTestMixin, TembaTest, matchers +from temba.tests.engine import MockSessionWriter +from temba.triggers.models import Trigger +from temba.utils import json +from temba.utils.uuid import uuid4 + + +class FlowTest(TembaTest, CRUDLTestMixin): + def setUp(self): + super().setUp() + + self.contact = self.create_contact("Eric", phone="+250788382382") + self.contact2 = self.create_contact("Nic", phone="+250788383383") + self.contact3 = self.create_contact("Norbert", phone="+250788123456") + self.contact4 = self.create_contact("Teeh", phone="+250788123457", language="por") + + self.other_group = self.create_group("Other", []) + + def test_get_unique_name(self): + self.assertEqual("Testing", Flow.get_unique_name(self.org, "Testing")) + + # ensure checking against existing flows is case-insensitive + testing = self.create_flow("TESTING") + + self.assertEqual("Testing 2", Flow.get_unique_name(self.org, "Testing")) + self.assertEqual("Testing", Flow.get_unique_name(self.org, "Testing", ignore=testing)) + self.assertEqual("Testing", Flow.get_unique_name(self.org2, "Testing")) # different org + + self.create_flow("Testing 2") + + self.assertEqual("Testing 3", Flow.get_unique_name(self.org, "Testing")) + + # ensure we don't exceed the name length limit + self.create_flow("X" * 64) + + 
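+ # the name is truncated to 62 characters so that the " 2" suffix keeps it within the 64 character limit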
self.assertEqual(f"{'X' * 62} 2", Flow.get_unique_name(self.org, "X" * 64)) + + def test_clean_name(self): + self.assertEqual("Hello", Flow.clean_name("Hello\0")) + self.assertEqual("Hello/n", Flow.clean_name("Hello\\n")) + self.assertEqual("Say 'Hi'", Flow.clean_name('Say "Hi"')) + self.assertEqual("x" * 64, Flow.clean_name("x" * 100)) + self.assertEqual("a b", Flow.clean_name(f"a{' ' * 32}b{' ' * 32}c")) + + @patch("temba.mailroom.queue_interrupt") + def test_archive(self, mock_queue_interrupt): + flow = self.create_flow("Test") + flow.archive(self.admin) + + mock_queue_interrupt.assert_called_once_with(self.org, flow=flow) + + flow.refresh_from_db() + self.assertEqual(flow.is_archived, True) + self.assertEqual(flow.is_active, True) + + @patch("temba.mailroom.queue_interrupt") + def test_release(self, mock_queue_interrupt): + global1 = Global.get_or_create(self.org, self.admin, "api_key", "API Key", "234325") + flow = self.create_flow("Test") + flow.global_dependencies.add(global1) + + flow.release(self.admin) + + mock_queue_interrupt.assert_called_once_with(self.org, flow=flow) + + flow.refresh_from_db() + self.assertTrue(flow.name.startswith("deleted-")) + self.assertFalse(flow.is_archived) + self.assertFalse(flow.is_active) + self.assertEqual(0, flow.global_dependencies.count()) + + def test_get_definition(self): + favorites = self.get_flow("favorites_v13") + + # fill the definition with junk metadata + rev = favorites.get_current_revision() + rev.definition["uuid"] = "Nope" + rev.definition["name"] = "Not the name" + rev.definition["revision"] = 1234567 + rev.definition["expire_after_minutes"] = 7654 + rev.save(update_fields=("definition",)) + + # definition should use values from flow db object + definition = favorites.get_definition() + self.assertEqual(definition["uuid"], str(favorites.uuid)) + self.assertEqual(definition["name"], "Favorites") + self.assertEqual(definition["revision"], 1) + self.assertEqual(definition["expire_after_minutes"], 720) + + # when saving a new revision we overwrite metadata + favorites.save_revision(self.admin, rev.definition) + rev = favorites.get_current_revision() + self.assertEqual(rev.definition["uuid"], str(favorites.uuid)) + self.assertEqual(rev.definition["name"], "Favorites") + self.assertEqual(rev.definition["revision"], 2) + self.assertEqual(rev.definition["expire_after_minutes"], 720) + + # can't get definition of a flow with no revisions + favorites.revisions.all().delete() + self.assertRaises(AssertionError, favorites.get_definition) + + def test_ensure_current_version(self): + # importing migrates to latest spec version + flow = self.get_flow("favorites_v13") + self.assertEqual("13.6.1", flow.version_number) + self.assertEqual(1, flow.revisions.count()) + + # rewind one spec version.. 
+ flow.version_number = "13.0.0" + flow.save(update_fields=("version_number",)) + rev = flow.revisions.get() + rev.definition["spec_version"] = "13.0.0" + rev.spec_version = "13.0.0" + rev.save() + + old_modified_on = flow.modified_on + old_saved_on = flow.saved_on + + flow.ensure_current_version() + + # check we migrate to current spec version + self.assertEqual("13.6.1", flow.version_number) + self.assertEqual(2, flow.revisions.count()) + self.assertEqual("system", flow.revisions.order_by("id").last().created_by.username) + + # saved on won't have been updated but modified on will + self.assertEqual(old_saved_on, flow.saved_on) + self.assertGreater(flow.modified_on, old_modified_on) + + def test_flow_archive_with_campaign(self): + self.login(self.admin) + self.get_flow("the_clinic") + + campaign = Campaign.objects.get(name="Appointment Schedule") + flow = Flow.objects.get(name="Confirm Appointment") + + campaign_event = CampaignEvent.objects.filter(flow=flow, campaign=campaign).first() + self.assertIsNotNone(campaign_event) + + # do not archive if the campaign is active + Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) + + flow.refresh_from_db() + self.assertFalse(flow.is_archived) + + campaign.is_archived = True + campaign.save() + + # can archive if the campaign is archived + Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) + + flow.refresh_from_db() + self.assertTrue(flow.is_archived) + + campaign.is_archived = False + campaign.save() + + flow.is_archived = False + flow.save() + + campaign_event.is_active = False + campaign_event.save() + + # can archive if the campaign is not archived with no active event + Flow.apply_action_archive(self.admin, Flow.objects.filter(pk=flow.pk)) + + flow.refresh_from_db() + self.assertTrue(flow.is_archived) + + def test_editor(self): + flow = self.create_flow("Test") + + self.login(self.admin) + + flow_editor_url = reverse("flows.flow_editor", args=[flow.uuid]) + + response = self.client.get(flow_editor_url) + + self.assertTrue(response.context["mutable"]) + self.assertTrue(response.context["can_start"]) + self.assertTrue(response.context["can_simulate"]) + self.assertContains(response, reverse("flows.flow_simulate", args=[flow.id])) + self.assertContains(response, 'id="rp-flow-editor"') + + # flows that are archived can't be edited, started or simulated + self.login(self.admin) + + flow.is_archived = True + flow.save(update_fields=("is_archived",)) + + response = self.client.get(flow_editor_url) + + self.assertFalse(response.context["mutable"]) + self.assertFalse(response.context["can_start"]) + self.assertFalse(response.context["can_simulate"]) + + def test_editor_feature_filters(self): + flow = self.create_flow("Test") + + self.login(self.admin) + + def assert_features(features: set): + response = self.client.get(reverse("flows.flow_editor", args=[flow.uuid])) + self.assertEqual(features, set(json.loads(response.context["feature_filters"]))) + + # add a resthook + Resthook.objects.create(org=flow.org, created_by=self.admin, modified_by=self.admin) + assert_features({"resthook"}) + + # add an NLP classifier + Classifier.objects.create(org=flow.org, config="", created_by=self.admin, modified_by=self.admin) + assert_features({"classifier", "resthook"}) + + # add a DT One integration + DTOneType().connect(flow.org, self.admin, "login", "token") + assert_features({"airtime", "classifier", "resthook"}) + + # change our channel to use a whatsapp scheme + self.channel.schemes = [URN.WHATSAPP_SCHEME] + 
self.channel.save() + assert_features({"whatsapp", "airtime", "classifier", "resthook"}) + + # change our channel to use a facebook scheme + self.channel.schemes = [URN.FACEBOOK_SCHEME] + self.channel.save() + assert_features({"facebook", "optins", "airtime", "classifier", "resthook"}) + + self.setUpLocations() + + assert_features({"facebook", "optins", "airtime", "classifier", "resthook", "locations"}) + + def test_save_revision(self): + self.login(self.admin) + self.client.post( + reverse("flows.flow_create"), {"name": "Go Flow", "flow_type": Flow.TYPE_MESSAGE, "base_language": "eng"} + ) + flow = Flow.objects.get( + org=self.org, name="Go Flow", flow_type=Flow.TYPE_MESSAGE, version_number=Flow.CURRENT_SPEC_VERSION + ) + + # can't save older spec version over newer + definition = flow.revisions.order_by("id").last().definition + definition["spec_version"] = Flow.FINAL_LEGACY_VERSION + + with self.assertRaises(FlowVersionConflictException): + flow.save_revision(self.admin, definition) + + # can't save older revision over newer + definition["spec_version"] = Flow.CURRENT_SPEC_VERSION + definition["revision"] = 0 + + with self.assertRaises(FlowUserConflictException): + flow.save_revision(self.admin, definition) + + def test_clone(self): + flow = self.create_flow("123456789012345678901234567890123456789012345678901234567890") # 60 chars + flow.expires_after_minutes = 60 + flow.save(update_fields=("expires_after_minutes",)) + + copy1 = flow.clone(self.admin) + + self.assertNotEqual(flow.id, copy1.id) + self.assertEqual(60, copy1.expires_after_minutes) + + # name should start with "Copy of" and be truncated to 64 chars + self.assertEqual("Copy of 12345678901234567890123456789012345678901234567890123456", copy1.name) + + # cloning again should generate a unique name + copy2 = flow.clone(self.admin) + self.assertEqual("Copy of 123456789012345678901234567890123456789012345678901234 2", copy2.name) + copy3 = flow.clone(self.admin) + self.assertEqual("Copy of 123456789012345678901234567890123456789012345678901234 3", copy3.name) + + # ensure that truncating doesn't leave trailing spaces + flow2 = self.create_flow("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabc efghijkl") + copy2 = flow2.clone(self.admin) + self.assertEqual("Copy of abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabc", copy2.name) + + def test_copy_group_split_no_name(self): + flow = self.get_flow("group_split_no_name") + flow_def = flow.get_definition() + + copy = flow.clone(self.admin) + copy_def = copy.get_definition() + + self.assertEqual(len(copy_def["nodes"]), 1) + self.assertEqual(len(copy_def["nodes"][0]["router"]["cases"]), 1) + self.assertEqual( + copy_def["nodes"][0]["router"]["cases"][0], + { + "uuid": matchers.UUID4String(), + "type": "has_group", + "arguments": [matchers.UUID4String()], + "category_uuid": matchers.UUID4String(), + }, + ) + + # check that the original and the copy reference the same group + self.assertEqual( + flow_def["nodes"][0]["router"]["cases"][0]["arguments"], + copy_def["nodes"][0]["router"]["cases"][0]["arguments"], + ) + + def test_get_activity(self): + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + + flow1.counts.create(scope="node:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=3) + flow1.counts.create(scope="node:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=1) + flow1.counts.create(scope="node:400d6b5e-c963-42a1-a06c-50bb9b1e38b1", count=5) + + flow1.counts.create( + 
scope="segment:1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=3 + ) + flow1.counts.create( + scope="segment:1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=4 + ) + flow1.counts.create( + scope="segment:6f607948-f3f0-4a6a-94b8-7fdd877895ca:400d6b5e-c963-42a1-a06c-50bb9b1e38b1", count=5 + ) + flow2.counts.create( + scope="segment:a4fe3ada-b062-47e4-be58-bcbe1bca31b4:74a53ff4-fe63-4d89-875e-cae3caca177c", count=6 + ) + + self.assertEqual( + ( + {"01c175da-d23d-40a4-a845-c4a9bb4b481a": 4, "400d6b5e-c963-42a1-a06c-50bb9b1e38b1": 5}, + { + "1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a": 7, + "6f607948-f3f0-4a6a-94b8-7fdd877895ca:400d6b5e-c963-42a1-a06c-50bb9b1e38b1": 5, + }, + ), + flow1.get_activity(), + ) + self.assertEqual( + ({}, {"a4fe3ada-b062-47e4-be58-bcbe1bca31b4:74a53ff4-fe63-4d89-875e-cae3caca177c": 6}), flow2.get_activity() + ) + + def test_get_category_counts(self): + def assertCount(counts, result_key, category_name, truth): + found = False + for count in counts: + if count["key"] == result_key: + categories = count["categories"] + for category in categories: + if category["name"] == category_name: + found = True + self.assertEqual(category["count"], truth) + self.assertTrue(found) + + favorites = self.get_flow("favorites_v13") + flow_nodes = favorites.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_other = flow_nodes[1] + color_split = flow_nodes[2] + beer_prompt = flow_nodes[3] + beer_split = flow_nodes[5] + name_prompt = flow_nodes[6] + name_split = flow_nodes[7] + + # add in some fake data + for i in range(0, 10): + contact = self.create_contact("Contact %d" % i, phone="+120655530%d" % i) + ( + MockSessionWriter(contact, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "blue")) + .set_result("Color", "blue", "Blue", "blue") + .visit(beer_prompt) + .send_msg("Good choice, I like Blue too! What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "primus")) + .set_result("Beer", "primus", "Primus", "primus") + .visit(name_prompt) + .send_msg("Lastly, what is your name?", self.channel) + .visit(name_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "russell")) + .set_result("Name", "russell", "All Responses", "russell") + .complete() + .save() + ) + + for i in range(0, 5): + contact = self.create_contact("Contact %d" % i, phone="+120655531%d" % i) + ( + MockSessionWriter(contact, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "red")) + .set_result("Color", "red", "Red", "red") + .visit(beer_prompt) + .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "primus")) + .set_result("Beer", "primus", "Primus", "primus") + .visit(name_prompt) + .send_msg("Lastly, what is your name?", self.channel) + .visit(name_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "earl")) + .set_result("Name", "earl", "All Responses", "earl") + .complete() + .save() + ) + + # test update flow values + for i in range(0, 5): + contact = self.create_contact("Contact %d" % i, phone="+120655532%d" % i) + ( + MockSessionWriter(contact, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "orange")) + .set_result("Color", "orange", "Other", "orange") + .visit(color_other) + .send_msg("I don't know that one, try again please.", self.channel) + .visit(color_split) + .wait() + .save() + .resume(msg=self.create_incoming_msg(contact, "green")) + .set_result("Color", "green", "Green", "green") + .visit(beer_prompt) + .send_msg("Good choice, I like Green too! What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "skol")) + .set_result("Beer", "skol", "Skol", "skol") + .visit(name_prompt) + .send_msg("Lastly, what is your name?", self.channel) + .visit(name_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "bobby")) + .set_result("Name", "bobby", "All Responses", "bobby") + .complete() + .save() + ) + + counts = favorites.get_category_counts() + + assertCount(counts, "color", "Blue", 10) + assertCount(counts, "color", "Red", 5) + assertCount(counts, "beer", "Primus", 15) + + # name shouldn't be included since it's open ended + self.assertNotIn('"name": "Name"', json.dumps(counts)) + + # five oranges went back and became greens + assertCount(counts, "color", "Other", 0) + assertCount(counts, "color", "Green", 5) + + # now remap the uuid for our color node + flow_json = favorites.get_definition() + flow_json = json.loads(json.dumps(flow_json).replace(color_split["uuid"], str(uuid4()))) + flow_nodes = flow_json["nodes"] + color_prompt = flow_nodes[0] + color_other = flow_nodes[1] + color_split = flow_nodes[2] + + favorites.save_revision(self.admin, flow_json) + + # send a few more runs through our updated flow + for i in range(0, 3): + contact = self.create_contact("Contact %d" % i, phone="+120655533%d" % i) + ( + MockSessionWriter(contact, favorites) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "red")) + .set_result("Color", "red", "Red", "red") + .visit(beer_prompt) + .send_msg("Good choice, I like Red too! 
What is your favorite beer?", self.channel) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(contact, "turbo")) + .set_result("Beer", "turbo", "Turbo King", "turbo") + .visit(name_prompt) + .wait() + .save() + ) + + # should now have three more reds + counts = favorites.get_category_counts() + assertCount(counts, "color", "Red", 8) + assertCount(counts, "beer", "Turbo King", 3) + + # now delete the color split and repoint nodes to the beer split + flow_json["nodes"].pop(2) + for node in flow_json["nodes"]: + for exit in node["exits"]: + if exit.get("destination_uuid") == color_split["uuid"]: + exit["destination_uuid"] = beer_split["uuid"] + + favorites.save_revision(self.admin, flow_json) + + # now the color counts have been removed, but beer is still there + counts = favorites.get_category_counts() + self.assertEqual(["beer"], [c["key"] for c in counts]) + assertCount(counts, "beer", "Turbo King", 3) + + # make sure it still works after ze squashings + self.assertEqual(76, FlowCategoryCount.objects.all().count()) + squash_flow_counts() + self.assertEqual(9, FlowCategoryCount.objects.all().count()) + counts = favorites.get_category_counts() + assertCount(counts, "beer", "Turbo King", 3) + + # test tostring + str(FlowCategoryCount.objects.all().first()) + + # and if we delete our runs, things zero out + for run in FlowRun.objects.all(): + run.delete() + + counts = favorites.get_category_counts() + assertCount(counts, "beer", "Turbo King", 0) + + def test_category_counts_with_null_categories(self): + flow = self.get_flow("color_v13") + flow_nodes = flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[4] + + msg = self.create_incoming_msg(self.contact, "blue") + run = ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .send_msg("What is your favorite color?", self.channel) + .visit(color_split) + .wait() + .resume(msg=msg) + .set_result("Color", "blue", "Blue", "blue") + .complete() + .save() + ).session.runs.get() + + FlowCategoryCount.objects.get(category_name="Blue", result_name="Color", result_key="color", count=1) + + # get our run and clear the category + run = FlowRun.objects.get(flow=flow, contact=self.contact) + results = run.results + del results["color"]["category"] + results["color"]["created_on"] = timezone.now() + run.save(update_fields=["results", "modified_on"]) + + # should have added a negative one now + self.assertEqual(2, FlowCategoryCount.objects.filter(category_name="Blue", result_name="Color").count()) + FlowCategoryCount.objects.get(category_name="Blue", result_name="Color", result_key="color", count=-1) + + def test_start_counts(self): + # create start for 10 contacts + flow = self.create_flow("Test") + start = FlowStart.objects.create(org=self.org, flow=flow, created_by=self.admin) + for i in range(10): + start.contacts.add(self.create_contact("Bob", urns=[f"twitter:bobby{i}"])) + + # create runs for first 5 + for c in start.contacts.order_by("id")[:5]: + MockSessionWriter(contact=c, flow=flow, start=start).wait().save() + + # check our count + self.assertEqual(FlowStartCount.get_count(start), 5) + + # create runs for last 5 + for c in start.contacts.order_by("id")[5:]: + MockSessionWriter(contact=c, flow=flow, start=start).wait().save() + + # check our count + self.assertEqual(FlowStartCount.get_count(start), 10) + + # squash them + FlowStartCount.squash() + self.assertEqual(FlowStartCount.get_count(start), 10) + + def test_flow_keyword_update(self): + self.login(self.admin) + flow = 
Flow.create(self.org, self.admin, "Flow") + flow.flow_type = Flow.TYPE_SURVEY + flow.save() + + # keywords aren't an option for survey flows + response = self.client.get(reverse("flows.flow_update", args=[flow.pk])) + self.assertNotIn("keyword_triggers", response.context["form"].fields) + self.assertNotIn("ignore_triggers", response.context["form"].fields) + + # send an update with triggers and the ignore flag anyway + post_data = dict() + post_data["name"] = "Flow With Keyword Triggers" + post_data["keyword_triggers"] = "notallowed" + post_data["ignore_triggers"] = True + post_data["expires_after_minutes"] = 60 * 12 + response = self.client.post(reverse("flows.flow_update", args=[flow.pk]), post_data, follow=True) + + # still shouldn't have any triggers + flow.refresh_from_db() + self.assertFalse(flow.ignore_triggers) + self.assertEqual(0, flow.triggers.all().count()) + + def test_flow_update_of_inactive_flow(self): + flow = self.get_flow("favorites") + flow.release(self.admin) + + post_data = {"name": "Flow that does not exist"} + + self.login(self.admin) + response = self.client.post(reverse("flows.flow_update", args=[flow.pk]), post_data) + + # can't update an already released flow + self.assertEqual(response.status_code, 404) + + def test_importing_dependencies(self): + # create channel to be matched by name + channel = self.create_channel("TG", "RapidPro Test", "12345324635") + + flow = self.get_flow("dependencies_v13") + flow_def = flow.get_definition() + + # global should have been created with blank value + self.assertTrue(self.org.globals.filter(name="Org Name", key="org_name", value="").exists()) + + # topic should have been created too + self.assertTrue(self.org.topics.filter(name="Support").exists()) + + # fields are created with the value type given in the export + self.assertTrue(self.org.fields.filter(key="cat_breed", name="Cat Breed", value_type="T").exists()) + self.assertTrue(self.org.fields.filter(key="french_age", value_type="N").exists()) + + # reference to channel changed to match existing channel by name + self.assertEqual( + {"uuid": str(channel.uuid), "name": "RapidPro Test"}, flow_def["nodes"][0]["actions"][4]["channel"] + ) + + # reference to classifier unchanged since it doesn't exist + self.assertEqual( + {"uuid": "891a1c5d-1140-4fd0-bd0d-a919ea25abb6", "name": "Feelings"}, + flow_def["nodes"][7]["actions"][0]["classifier"], + ) + + def test_flow_metadata(self): + # test importing both old and new flow formats + for flow_file in ("favorites", "favorites_v13"): + flow = self.get_flow(flow_file) + + self.assertEqual( + flow.metadata["results"], + [ + { + "key": "color", + "name": "Color", + "categories": ["Red", "Green", "Blue", "Cyan", "Other"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "beer", + "name": "Beer", + "categories": ["Mutzig", "Primus", "Turbo King", "Skol", "Other"], + "node_uuids": [matchers.UUID4String()], + }, + { + "key": "name", + "name": "Name", + "categories": ["All Responses"], + "node_uuids": [matchers.UUID4String()], + }, + ], + ) + self.assertEqual(len(flow.metadata["parent_refs"]), 0) + + def test_group_send(self): + # create an inactive group with the same name, to test that this doesn't blow up our import + group = ContactGroup.get_or_create(self.org, self.admin, "Survey Audience") + group.release(self.admin) + + # and create another as well + ContactGroup.get_or_create(self.org, self.admin, "Survey Audience") + + # fetching a flow with a group send shouldn't throw + self.get_flow("group_send_flow") + + def 
test_flow_delete_of_inactive_flow(self): + flow = self.create_flow("Test") + flow.release(self.admin) + + self.login(self.admin) + response = self.client.post(reverse("flows.flow_delete", args=[flow.pk])) + + # can't delete already released flow + self.assertEqual(response.status_code, 404) + + def test_delete(self): + flow = self.get_flow("favorites_v13") + flow_nodes = flow.get_definition()["nodes"] + color_prompt = flow_nodes[0] + color_split = flow_nodes[2] + beer_prompt = flow_nodes[3] + beer_split = flow_nodes[5] + + # create a campaign that contains this flow + friends = self.create_group("Friends", []) + poll_date = self.create_field("poll_date", "Poll Date", value_type=ContactField.TYPE_DATETIME) + + campaign = Campaign.create(self.org, self.admin, Campaign.get_unique_name(self.org, "Favorite Poll"), friends) + event1 = CampaignEvent.create_flow_event( + self.org, self.admin, campaign, poll_date, offset=0, unit="D", flow=flow, delivery_hour="13" + ) + + # create a trigger that contains this flow + trigger = Trigger.create( + self.org, self.admin, Trigger.TYPE_KEYWORD, flow, keywords=["poll"], match_type=Trigger.MATCH_FIRST_WORD + ) + + # run the flow + ( + MockSessionWriter(self.contact, flow) + .visit(color_prompt) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(self.contact, "RED")) + .visit(beer_prompt) + .visit(beer_split) + .wait() + .save() + ) + + # run it again to completion + joe = self.create_contact("Joe", phone="1234") + ( + MockSessionWriter(joe, flow) + .visit(color_prompt) + .visit(color_split) + .wait() + .resume(msg=self.create_incoming_msg(joe, "green")) + .visit(beer_prompt) + .visit(beer_split) + .wait() + .resume(msg=self.create_incoming_msg(joe, "primus")) + .complete() + .save() + ) + + # try to remove the flow, not logged in, no dice + response = self.client.post(reverse("flows.flow_delete", args=[flow.uuid])) + self.assertLoginRedirect(response) + + # login as admin + self.login(self.admin) + response = self.client.post(reverse("flows.flow_delete", args=[flow.uuid])) + self.assertEqual(200, response.status_code) + + # flow should no longer be active + flow.refresh_from_db() + self.assertFalse(flow.is_active) + + # runs should not be deleted + self.assertEqual(flow.runs.count(), 2) + + # our campaign event and trigger should no longer be active + event1.refresh_from_db() + self.assertFalse(event1.is_active) + + trigger.refresh_from_db() + self.assertFalse(trigger.is_active) + + def test_delete_with_dependencies(self): + self.login(self.admin) + + self.get_flow("dependencies") + self.get_flow("dependencies_voice") + parent = Flow.objects.filter(name="Dependencies").first() + child = Flow.objects.filter(name="Child Flow").first() + voice = Flow.objects.filter(name="Voice Dependencies").first() + + contact_fields = ( + {"key": "contact_age", "name": "Contact Age"}, + # fields based on parent and child references + {"key": "top"}, + {"key": "bottom"}, + # replies + {"key": "chw"}, + # url attachments + {"key": "attachment"}, + # dynamic groups + {"key": "cat_breed", "name": "Cat Breed"}, + {"key": "organization"}, + # sending messages + {"key": "recipient"}, + {"key": "message"}, + # sending emails + {"key": "email_message", "name": "Email Message"}, + {"key": "subject"}, + # trigger someone else + {"key": "other_phone", "name": "Other Phone"}, + # rules and localizations + {"key": "rule"}, + {"key": "french_rule", "name": "French Rule"}, + {"key": "french_age", "name": "French Age"}, + {"key": "french_fries", "name": "French Fries"}, + # 
updating contacts + {"key": "favorite_cat", "name": "Favorite Cat"}, + {"key": "next_cat_fact", "name": "Next Cat Fact"}, + {"key": "last_cat_fact", "name": "Last Cat Fact"}, + # webhook urls + {"key": "webhook"}, + # expression splits + {"key": "expression_split", "name": "Expression Split"}, + # voice says + {"key": "play_message", "name": "Play Message", "flow": voice}, + {"key": "voice_rule", "name": "Voice Rule", "flow": voice}, + # voice plays (recordings) + {"key": "voice_recording", "name": "Voice Recording", "flow": voice}, + ) + + for field_spec in contact_fields: + key = field_spec.get("key") + name = field_spec.get("name", key.capitalize()) + flow = field_spec.get("flow", parent) + + # make sure our field exists after import + field = self.org.fields.filter(key=key, name=name, is_system=False, is_proxy=False).first() + self.assertIsNotNone(field, "Couldn't find field %s (%s)" % (key, name)) + + # and our flow is dependent on us + self.assertIsNotNone( + flow.field_dependencies.filter(key__in=[key]).first(), + "Flow is missing dependency on %s (%s)" % (key, name), + ) + + # we can delete our child flow and the parent ('Dependencies') will be marked as having issues + self.client.post(reverse("flows.flow_delete", args=[child.uuid])) + + parent = Flow.objects.filter(name="Dependencies").get() + child.refresh_from_db() + + self.assertFalse(child.is_active) + self.assertTrue(parent.has_issues) + self.assertNotIn(child, parent.flow_dependencies.all()) + + # deleting our parent flow should also work + self.client.post(reverse("flows.flow_delete", args=[parent.uuid])) + + parent.refresh_from_db() + self.assertFalse(parent.is_active) + self.assertEqual(0, parent.field_dependencies.all().count()) + self.assertEqual(0, parent.flow_dependencies.all().count()) + self.assertEqual(0, parent.group_dependencies.all().count()) + + def test_update_expiration_task(self): + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + + # create waiting session and run for flow 1 + session1 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=self.contact, + current_flow=flow1, + status=FlowSession.STATUS_WAITING, + output_url="http://sessions.com/123.json", + wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), + wait_expires_on=datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), + wait_resume_on_expire=False, + ) + + # create non-waiting session for flow 1 + session2 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=self.contact, + current_flow=flow1, + status=FlowSession.STATUS_COMPLETED, + output_url="http://sessions.com/234.json", + wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), + wait_expires_on=None, + wait_resume_on_expire=False, + ended_on=timezone.now(), + ) + + # create waiting session for flow 2 + session3 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=self.contact, + current_flow=flow2, + status=FlowSession.STATUS_WAITING, + output_url="http://sessions.com/345.json", + wait_started_on=datetime(2022, 1, 1, 0, 0, 0, 0, tzone.utc), + wait_expires_on=datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), + wait_resume_on_expire=False, + ) + + # update flow 1 expires to 2 hours + flow1.expires_after_minutes = 120 + flow1.save(update_fields=("expires_after_minutes",)) + + update_session_wait_expires(flow1.id) + + # new session expiration should be wait_started_on + 2 hours + session1.refresh_from_db() + self.assertEqual(datetime(2022, 1, 1, 2, 0, 0, 0, tzone.utc), session1.wait_expires_on) + + # other sessions 
should be unchanged + session2.refresh_from_db() + session3.refresh_from_db() + self.assertIsNone(session2.wait_expires_on) + self.assertEqual(datetime(2022, 1, 2, 0, 0, 0, 0, tzone.utc), session3.wait_expires_on) diff --git a/temba/flows/tests/test_flowcrudl.py b/temba/flows/tests/test_flowcrudl.py new file mode 100644 index 00000000000..9a9782046ef --- /dev/null +++ b/temba/flows/tests/test_flowcrudl.py @@ -0,0 +1,2023 @@ +import io +from datetime import date, datetime, timedelta, timezone as tzone +from unittest.mock import patch + +from django_redis import get_redis_connection + +from django.test.utils import override_settings +from django.urls import reverse + +from temba import mailroom +from temba.contacts.models import URN +from temba.flows.models import Flow, FlowLabel, FlowStart, FlowUserConflictException, ResultsExport +from temba.msgs.models import SystemLabel +from temba.orgs.models import Export +from temba.templates.models import TemplateTranslation +from temba.tests import CRUDLTestMixin, TembaTest, matchers, mock_mailroom +from temba.tests.base import get_contact_search +from temba.triggers.models import Trigger +from temba.utils.uuid import uuid4 +from temba.utils.views.mixins import TEMBA_MENU_SELECTION + + +class FlowCRUDLTest(TembaTest, CRUDLTestMixin): + def test_menu(self): + menu_url = reverse("flows.flow_menu") + + FlowLabel.create(self.org, self.admin, "Important") + + self.assertRequestDisallowed(menu_url, [None, self.agent]) + self.assertPageMenu( + menu_url, + self.admin, + [ + "Active", + "Archived", + "Globals", + ("History", ["Starts", "Webhooks"]), + ("Labels", ["Important (0)"]), + ], + ) + + def test_create(self): + create_url = reverse("flows.flow_create") + self.create_flow("Registration") + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + response = self.assertCreateFetch( + create_url, + [self.editor, self.admin], + form_fields=["name", "keyword_triggers", "flow_type", "base_language"], + ) + + # check flow type options + self.assertEqual( + [ + (Flow.TYPE_MESSAGE, "Messaging"), + (Flow.TYPE_VOICE, "Phone Call"), + (Flow.TYPE_BACKGROUND, "Background"), + ], + response.context["form"].fields["flow_type"].choices, + ) + + # try to submit without name or language + self.assertCreateSubmit( + create_url, + self.admin, + {"flow_type": "M"}, + form_errors={"name": "This field is required.", "base_language": "This field is required."}, + ) + + # try to submit with a name that contains disallowed characters + self.assertCreateSubmit( + create_url, + self.admin, + {"name": '"Registration"', "flow_type": "M", "base_language": "eng"}, + form_errors={"name": 'Cannot contain the character: "'}, + ) + + # try to submit with a name that is too long + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "X" * 65, "flow_type": "M", "base_language": "eng"}, + form_errors={"name": "Ensure this value has at most 64 characters (it has 65)."}, + ) + + # try to submit with a name that is already used + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Registration", "flow_type": "M", "base_language": "eng"}, + form_errors={"name": "Already used by another flow."}, + ) + + response = self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Flow 1", "flow_type": "M", "base_language": "eng"}, + new_obj_query=Flow.objects.filter(org=self.org, flow_type="M", name="Flow 1"), + ) + + flow1 = Flow.objects.get(name="Flow 1") + self.assertEqual(1, flow1.revisions.all().count()) + + self.assertRedirect(response, 
reverse("flows.flow_editor", args=[flow1.uuid])) + + def test_create_with_keywords(self): + create_url = reverse("flows.flow_create") + + # try creating a flow with invalid keywords + self.assertCreateSubmit( + create_url, + self.admin, + { + "name": "Flow #1", + "base_language": "eng", + "keyword_triggers": ["toooooooooooooolong", "test"], + "flow_type": Flow.TYPE_MESSAGE, + }, + form_errors={ + "keyword_triggers": "Must be single words, less than 16 characters, containing only letters and numbers." + }, + ) + + # submit with valid keywords + self.assertCreateSubmit( + create_url, + self.admin, + { + "name": "Flow 1", + "base_language": "eng", + "keyword_triggers": ["testing", "test"], + "flow_type": Flow.TYPE_MESSAGE, + }, + new_obj_query=Flow.objects.filter(org=self.org, name="Flow 1", flow_type="M"), + ) + + # check the created keyword trigger + flow1 = Flow.objects.get(name="Flow 1") + self.assertEqual(1, flow1.triggers.count()) + self.assertEqual(1, flow1.triggers.filter(trigger_type="K", keywords=["testing", "test"]).count()) + + # try to create another flow with one of the same keywords + self.assertCreateSubmit( + create_url, + self.admin, + { + "name": "Flow 2", + "base_language": "eng", + "keyword_triggers": ["test"], + "flow_type": Flow.TYPE_MESSAGE, + }, + form_errors={"keyword_triggers": '"test" is already used for another flow.'}, + ) + + # add a group to the existing trigger + group = self.create_group("Testers", contacts=[]) + flow1.triggers.get().groups.add(group) + + # and now it's no longer a conflict + self.assertCreateSubmit( + create_url, + self.admin, + { + "name": "Flow 2", + "base_language": "eng", + "keyword_triggers": ["test"], + "flow_type": Flow.TYPE_MESSAGE, + }, + new_obj_query=Flow.objects.filter(org=self.org, name="Flow 2", flow_type="M"), + ) + + # check the created keyword triggers + flow2 = Flow.objects.get(name="Flow 2") + self.assertEqual([["test"]], list(flow2.triggers.order_by("id").values_list("keywords", flat=True))) + + def test_views(self): + create_url = reverse("flows.flow_create") + + self.create_contact("Eric", phone="+250788382382") + flow = self.create_flow("Test") + + # create a flow for another org + other_flow = Flow.create(self.org2, self.admin2, "Flow2") + + # no login, no list + response = self.client.get(reverse("flows.flow_list")) + self.assertLoginRedirect(response) + + user = self.admin + user.first_name = "Test" + user.last_name = "Contact" + user.save() + self.login(user) + + self.assertContentMenu(reverse("flows.flow_list"), self.user, ["Export"]) + + self.assertContentMenu( + reverse("flows.flow_list"), + self.admin, + ["New Flow", "New Label", "Import", "Export"], + ) + + # list, should have only one flow (the one created in setUp) + response = self.client.get(reverse("flows.flow_list")) + self.assertEqual(1, len(response.context["object_list"])) + + # inactive list shouldn't have any flows + response = self.client.get(reverse("flows.flow_archived")) + self.assertEqual(0, len(response.context["object_list"])) + + # also shouldn't be able to view other flow + response = self.client.get(reverse("flows.flow_editor", args=[other_flow.uuid])) + self.assertEqual(404, response.status_code) + + # get our create page + response = self.client.get(create_url) + self.assertTrue(response.context["has_flows"]) + + # create a new regular flow + response = self.client.post( + create_url, {"name": "Flow 1", "flow_type": Flow.TYPE_MESSAGE, "base_language": "eng"} + ) + self.assertEqual(302, response.status_code) + + # check we've been 
redirected to the editor and we have a revision + flow1 = Flow.objects.get(org=self.org, name="Flow 1") + self.assertEqual(f"/flow/editor/{flow1.uuid}/", response.url) + self.assertEqual(1, flow1.revisions.all().count()) + self.assertEqual(Flow.TYPE_MESSAGE, flow1.flow_type) + self.assertEqual(10080, flow1.expires_after_minutes) + + # add a trigger on this flow + trigger = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + flow1, + keywords=["unique"], + match_type=Trigger.MATCH_FIRST_WORD, + ) + + # create a new voice flow + response = self.client.post( + create_url, {"name": "Voice Flow", "flow_type": Flow.TYPE_VOICE, "base_language": "eng"} + ) + voice_flow = Flow.objects.get(org=self.org, name="Voice Flow") + self.assertEqual(response.status_code, 302) + self.assertEqual(voice_flow.flow_type, "V") + + # default expiration for voice is shorter + self.assertEqual(voice_flow.expires_after_minutes, 5) + + # test flows with triggers + # create a new flow with one unformatted keyword + response = self.client.post( + create_url, + { + "name": "Flow With Unformatted Keyword Triggers", + "keyword_triggers": ["this is", "it"], + "base_language": "eng", + }, + ) + self.assertFormError( + response.context["form"], + "keyword_triggers", + "Must be single words, less than 16 characters, containing only letters and numbers.", + ) + + # create a new flow with one existing keyword + response = self.client.post( + create_url, {"name": "Flow With Existing Keyword Triggers", "keyword_triggers": ["this", "is", "unique"]} + ) + self.assertFormError(response.context["form"], "keyword_triggers", '"unique" is already used for another flow.') + + # create another trigger so there are two in the way + trigger = Trigger.create( + self.org, + self.admin, + Trigger.TYPE_KEYWORD, + flow1, + keywords=["this"], + match_type=Trigger.MATCH_FIRST_WORD, + ) + + response = self.client.post( + create_url, {"name": "Flow With Existing Keyword Triggers", "keyword_triggers": ["this", "is", "unique"]} + ) + self.assertFormError( + response.context["form"], "keyword_triggers", '"this", "unique" are already used for another flow.' 
+ ) + trigger.delete() + + # create a new flow with keywords + response = self.client.post( + create_url, + { + "name": "Flow With Good Keyword Triggers", + "base_language": "eng", + "keyword_triggers": ["this", "is", "it"], + "flow_type": Flow.TYPE_MESSAGE, + "expires_after_minutes": 30, + }, + ) + flow3 = Flow.objects.get(name="Flow With Good Keyword Triggers") + + # check we're being redirected to the editor view + self.assertRedirect(response, reverse("flows.flow_editor", args=[flow3.uuid])) + + # can see results for a flow + response = self.client.get(reverse("flows.flow_results", args=[flow.id])) + self.assertEqual(200, response.status_code) + + # check flow listing + response = self.client.get(reverse("flows.flow_list")) + self.assertEqual(list(response.context["object_list"]), [flow3, voice_flow, flow1, flow]) # by saved_on + + # test update view + response = self.client.post(reverse("flows.flow_update", args=[flow.id])) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.context["form"].fields), 5) + self.assertIn("name", response.context["form"].fields) + self.assertIn("keyword_triggers", response.context["form"].fields) + self.assertIn("ignore_triggers", response.context["form"].fields) + + # test ivr flow creation + self.channel.role = "SRCA" + self.channel.save() + + response = self.client.post( + create_url, + { + "name": "Message flow", + "base_language": "eng", + "expires_after_minutes": 5, + "flow_type": Flow.TYPE_MESSAGE, + }, + ) + msg_flow = Flow.objects.get(name="Message flow") + + self.assertEqual(302, response.status_code) + self.assertEqual(msg_flow.flow_type, Flow.TYPE_MESSAGE) + + response = self.client.post( + create_url, + {"name": "Call flow", "base_language": "eng", "expires_after_minutes": 5, "flow_type": Flow.TYPE_VOICE}, + ) + call_flow = Flow.objects.get(name="Call flow") + + self.assertEqual(302, response.status_code) + self.assertEqual(call_flow.flow_type, Flow.TYPE_VOICE) + + # test creating a flow with base language + self.org.set_flow_languages(self.admin, ["eng"]) + + response = self.client.post( + create_url, + { + "name": "Language Flow", + "expires_after_minutes": 5, + "base_language": "eng", + "flow_type": Flow.TYPE_MESSAGE, + }, + ) + + language_flow = Flow.objects.get(name="Language Flow") + + self.assertEqual(302, response.status_code) + self.assertEqual(language_flow.base_language, "eng") + + def test_update_messaging_flow(self): + flow = self.create_flow("Test") + update_url = reverse("flows.flow_update", args=[flow.id]) + + def assert_triggers(expected: list): + actual = list(flow.triggers.filter(trigger_type="K", is_active=True).values("keywords", "is_archived")) + self.assertCountEqual(actual, expected) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields={ + "name": "Test", + "keyword_triggers": [], + "expires_after_minutes": 10080, + "ignore_triggers": False, + }, + ) + + # try to update with empty name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "", "expires_after_minutes": 10, "ignore_triggers": True}, + form_errors={"name": "This field is required."}, + object_unchanged=flow, + ) + + # update all fields + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "New Name", + "keyword_triggers": ["test", "help"], + "expires_after_minutes": 10, + "ignore_triggers": True, + }, + ) + + flow.refresh_from_db() + self.assertEqual("New Name", flow.name) + 
self.assertEqual(10, flow.expires_after_minutes) + self.assertTrue(flow.ignore_triggers) + + assert_triggers([{"keywords": ["test", "help"], "is_archived": False}]) + + # remove one keyword and add another + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "New Name", + "keyword_triggers": ["help", "support"], + "expires_after_minutes": 10, + "ignore_triggers": True, + }, + ) + + assert_triggers( + [ + {"keywords": ["test", "help"], "is_archived": True}, + {"keywords": ["help", "support"], "is_archived": False}, + ] + ) + + # put "test" keyword back and remove "support" + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "New Name", + "keyword_triggers": ["test", "help"], + "expires_after_minutes": 10, + "ignore_triggers": True, + }, + ) + + assert_triggers( + [ + {"keywords": ["test", "help"], "is_archived": False}, + {"keywords": ["help", "support"], "is_archived": True}, + ] + ) + + # add channel filter to active trigger + support = flow.triggers.get(is_archived=False) + support.channel = self.channel + support.save(update_fields=("channel",)) + + # re-adding "support" will now restore that trigger + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "New Name", + "keyword_triggers": ["test", "help", "support"], + "expires_after_minutes": 10, + "ignore_triggers": True, + }, + ) + + assert_triggers( + [ + {"keywords": ["test", "help"], "is_archived": False}, + {"keywords": ["help", "support"], "is_archived": False}, + ] + ) + + def test_update_voice_flow(self): + flow = self.create_flow("IVR Test", flow_type=Flow.TYPE_VOICE) + update_url = reverse("flows.flow_update", args=[flow.id]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch( + update_url, + [self.editor, self.admin], + form_fields=["name", "keyword_triggers", "expires_after_minutes", "ignore_triggers", "ivr_retry"], + ) + + # try to update with an expires value which is only for messaging flows and an invalid retry value + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "New Name", "expires_after_minutes": 720, "ignore_triggers": True, "ivr_retry": 1234}, + form_errors={ + "expires_after_minutes": "Select a valid choice. 720 is not one of the available choices.", + "ivr_retry": "Select a valid choice. 
1234 is not one of the available choices.", + }, + object_unchanged=flow, + ) + + # update the name and other voice flow settings + self.assertUpdateSubmit( + update_url, + self.admin, + { + "name": "New Name", + "keyword_triggers": ["test", "help"], + "expires_after_minutes": 10, + "ignore_triggers": True, + "ivr_retry": 30, + }, + ) + + flow.refresh_from_db() + self.assertEqual("New Name", flow.name) + self.assertEqual(10, flow.expires_after_minutes) + self.assertTrue(flow.ignore_triggers) + self.assertEqual(30, flow.metadata.get("ivr_retry")) + self.assertEqual(1, flow.triggers.count()) + self.assertEqual(1, flow.triggers.filter(keywords=["test", "help"]).count()) + + # check we still have that value after saving a new revision + flow.save_revision(self.admin, flow.get_definition()) + self.assertEqual(30, flow.metadata["ivr_retry"]) + + def test_update_surveyor_flow(self): + flow = self.create_flow("Survey", flow_type=Flow.TYPE_SURVEY) + update_url = reverse("flows.flow_update", args=[flow.id]) + + # we should only see name and contact creation option on form + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=["name", "contact_creation"]) + + # update name and contact creation option to be per login + self.assertUpdateSubmit(update_url, self.admin, {"name": "New Name", "contact_creation": "login"}) + + flow.refresh_from_db() + self.assertEqual("New Name", flow.name) + self.assertEqual("login", flow.metadata.get("contact_creation")) + + def test_update_background_flow(self): + flow = self.create_flow("Background", flow_type=Flow.TYPE_BACKGROUND) + update_url = reverse("flows.flow_update", args=[flow.id]) + + # we should only see name on form + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=["name"]) + + # update the name + self.assertUpdateSubmit(update_url, self.admin, {"name": "New Name"}) + + flow.refresh_from_db() + self.assertEqual("New Name", flow.name) + + def test_list_views(self): + flow1 = self.create_flow("Flow 1") + flow2 = self.create_flow("Flow 2") + + # archive second flow + flow2.is_archived = True + flow2.save(update_fields=("is_archived",)) + + flow3 = self.create_flow("Flow 3") + + self.login(self.admin) + + # see our flows on the list page + response = self.client.get(reverse("flows.flow_list")) + self.assertContains(response, flow1.name) + self.assertContains(response, flow3.name) + + # archive it + response = self.client.post(reverse("flows.flow_list"), {"action": "archive", "objects": flow1.id}) + self.assertEqual(200, response.status_code) + + # flow should no longer appear in list + response = self.client.get(reverse("flows.flow_list")) + self.assertNotContains(response, flow1.name) + self.assertContains(response, flow3.name) + + self.assertEqual(("archive", "label", "export-results"), response.context["actions"]) + + # but does appear in archived list + response = self.client.get(reverse("flows.flow_archived")) + self.assertContains(response, flow1.name) + + # flow2 should appear before flow1 since it was created later + self.assertEqual(flow2, response.context["object_list"][0]) + self.assertEqual(flow1, response.context["object_list"][1]) + + # unarchive it + response = self.client.post(reverse("flows.flow_archived"), {"action": "restore", "objects": flow1.id}) + self.assertEqual(200, 
response.status_code) + + # flow should no longer appear in archived list + response = self.client.get(reverse("flows.flow_archived")) + self.assertNotContains(response, flow1.name) + self.assertEqual(("restore",), response.context["actions"]) + + # but does appear in normal list + response = self.client.get(reverse("flows.flow_list")) + self.assertContains(response, flow1.name) + self.assertContains(response, flow3.name) + + # can label flows + label1 = FlowLabel.create(self.org, self.admin, "Important") + + response = self.client.post( + reverse("flows.flow_list"), {"action": "label", "objects": flow1.id, "label": label1.id} + ) + + self.assertEqual(200, response.status_code) + self.assertEqual({label1}, set(flow1.labels.all())) + self.assertEqual({flow1}, set(label1.flows.all())) + + # and unlabel + response = self.client.post( + reverse("flows.flow_list"), {"action": "label", "objects": flow1.id, "label": label1.id, "add": False} + ) + + self.assertEqual(200, response.status_code) + + flow1.refresh_from_db() + self.assertEqual(set(), set(flow1.labels.all())) + + # voice flows should be included in the count + Flow.objects.filter(id=flow1.id).update(flow_type=Flow.TYPE_VOICE) + + response = self.client.get(reverse("flows.flow_list")) + self.assertContains(response, flow1.name) + + # single message flow (from campaign) should not be included in counts and not even on this list + Flow.objects.filter(id=flow1.id).update(is_system=True) + + response = self.client.get(reverse("flows.flow_list")) + self.assertNotContains(response, flow1.name) + + # single message flow should not even be in the archived list + Flow.objects.filter(id=flow1.id).update(is_system=True, is_archived=True) + + response = self.client.get(reverse("flows.flow_archived")) + self.assertNotContains(response, flow1.name) + + def test_filter(self): + flow1 = self.create_flow("Flow 1") + flow2 = self.create_flow("Flow 2") + + label1 = FlowLabel.create(self.org, self.admin, "Important") + label2 = FlowLabel.create(self.org, self.admin, "Very Important") + + label1.toggle_label([flow1, flow2], add=True) + label2.toggle_label([flow2], add=True) + + self.login(self.admin) + + response = self.client.get(reverse("flows.flow_filter", args=[label1.uuid])) + self.assertEqual([flow2, flow1], list(response.context["object_list"])) + self.assertEqual(("label", "export-results"), response.context["actions"]) + + response = self.client.get(reverse("flows.flow_filter", args=[label2.uuid])) + self.assertEqual([flow2], list(response.context["object_list"])) + self.assertEqual(f"/flow/labels/{label2.uuid}", response.headers.get(TEMBA_MENU_SELECTION)) + + def test_get_definition(self): + flow = self.get_flow("color_v13") + + # if definition is outdated, metadata values are updated from db object + flow.name = "Amazing Flow" + flow.save(update_fields=("name",)) + + self.assertEqual("Amazing Flow", flow.get_definition()["name"]) + + # make a flow that looks like a legacy flow + flow = self.get_flow("color_v11") + original_def = self.load_json("test_flows/color_v11.json")["flows"][0] + + flow.version_number = "11.12" + flow.save(update_fields=("version_number",)) + + revision = flow.revisions.get() + revision.definition = original_def + revision.spec_version = "11.12" + revision.save(update_fields=("definition", "spec_version")) + + self.assertIn("metadata", flow.get_definition()) + + # if definition is outdated, metadata values are updated from db object + flow.name
= "Amazing Flow 2" + flow.save(update_fields=("name",)) + + self.assertEqual("Amazing Flow 2", flow.get_definition()["metadata"]["name"]) + + # metadata section can be missing too + del original_def["metadata"] + revision.definition = original_def + revision.save(update_fields=("definition",)) + + self.assertEqual("Amazing Flow 2", flow.get_definition()["metadata"]["name"]) + + def test_revisions(self): + flow = self.get_flow("color_v11") + + revisions_url = reverse("flows.flow_revisions", args=[flow.uuid]) + + original_def = self.load_json("test_flows/color_v11.json")["flows"][0] + + # rewind definition to legacy spec + revision = flow.revisions.get() + revision.definition = original_def + revision.spec_version = "11.12" + revision.save(update_fields=("definition", "spec_version")) + + # create a new migrated revision + flow_def = revision.get_migrated_definition() + flow.save_revision(self.admin, flow_def) + + revisions = list(flow.revisions.all().order_by("-created_on")) + + # now we should have two revisions + self.assertEqual(2, len(revisions)) + self.assertEqual(2, revisions[0].revision) + self.assertEqual(Flow.CURRENT_SPEC_VERSION, revisions[0].spec_version) + self.assertEqual(1, revisions[1].revision) + self.assertEqual("11.12", revisions[1].spec_version) + + self.assertRequestDisallowed(revisions_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(revisions_url, [self.user, self.editor, self.admin]) + self.assertEqual( + [ + { + "user": {"email": "admin@textit.com", "name": "Andy"}, + "created_on": matchers.ISODate(), + "id": revisions[0].id, + "version": Flow.CURRENT_SPEC_VERSION, + "revision": 2, + }, + { + "user": {"email": "admin@textit.com", "name": "Andy"}, + "created_on": matchers.ISODate(), + "id": revisions[1].id, + "version": "11.12", + "revision": 1, + }, + ], + response.json()["results"], + ) + + # fetch a specific revision + response = self.assertReadFetch(f"{revisions_url}{revisions[0].id}/", [self.user, self.editor, self.admin]) + + # make sure we can read the definition + definition = response.json()["definition"] + self.assertEqual("und", definition["language"]) + + # fetch the legacy revision + response = self.client.get(f"{revisions_url}{revisions[1].id}/") + + # should automatically migrate to latest spec + self.assertEqual(Flow.CURRENT_SPEC_VERSION, response.json()["definition"]["spec_version"]) + + # but we can also limit how far it is migrated + response = self.client.get(f"{revisions_url}{revisions[1].id}/?version=13.0.0") + + # should only have been migrated to that version + self.assertEqual("13.0.0", response.json()["definition"]["spec_version"]) + + # check 404 for invalid revision number + response = self.requestView(f"{revisions_url}12345678/", self.admin) + self.assertEqual(404, response.status_code) + + def test_save_revisions(self): + flow = self.create_flow("Go Flow") + revisions_url = reverse("flows.flow_revisions", args=[flow.uuid]) + + self.login(self.admin) + response = self.client.get(revisions_url) + self.assertEqual(1, len(response.json())) + + definition = flow.revisions.all().first().definition + + # viewers can't save flows + self.login(self.user) + response = self.client.post(revisions_url, definition, content_type="application/json") + self.assertEqual(403, response.status_code) + + # check that we can create a new revision + self.login(self.admin) + response = self.client.post(revisions_url, definition, content_type="application/json") + new_revision = response.json() + self.assertEqual(2, 
new_revision["revision"][Flow.DEFINITION_REVISION]) + + # but we can't save our old revision + response = self.client.post(revisions_url, definition, content_type="application/json") + self.assertResponseError( + response, "description", "Your changes will not be saved until you refresh your browser" + ) + + # or save an old version + definition = flow.revisions.all().first().definition + definition[Flow.DEFINITION_SPEC_VERSION] = "11.12" + response = self.client.post(revisions_url, definition, content_type="application/json") + self.assertResponseError(response, "description", "Your flow has been upgraded to the latest version") + + def test_inactive_flow(self): + flow = self.create_flow("Deleted") + flow.release(self.admin) + + self.login(self.admin) + + response = self.client.get(reverse("flows.flow_revisions", args=[flow.uuid])) + + self.assertEqual(404, response.status_code) + + response = self.client.get(reverse("flows.flow_activity", args=[flow.uuid])) + + self.assertEqual(404, response.status_code) + + @mock_mailroom + def test_preview_start(self, mr_mocks): + flow = self.create_flow("Test Flow") + self.create_field("age", "Age") + self.create_contact("Ann", phone="+16302222222", fields={"age": 40}) + self.create_contact("Bob", phone="+16303333333", fields={"age": 33}) + + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) + + preview_url = reverse("flows.flow_preview_start", args=[flow.id]) + + self.login(self.editor) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + self.assertEqual( + { + "query": 'age > 30 AND status = "active" AND history != "Test Flow"', + "total": 100, + "send_time": 10.0, + "warnings": [], + "blockers": [], + }, + response.json(), + ) + + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) + self.login(self.customer_support, choose_org=self.org) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + self.assertEqual( + { + "query": 'age > 30 AND status = "active" AND history != "Test Flow"', + "total": 100, + "send_time": 10.0, + "warnings": [], + "blockers": [], + }, + response.json(), + ) + + mr_mocks.flow_start_preview( + query='age > 30 AND status = "active" AND history != "Test Flow" AND flow = ""', total=100 + ) + preview_url = reverse("flows.flow_preview_start", args=[flow.id]) + + self.login(self.editor) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True, "in_a_flow": True}, + }, + content_type="application/json", + ) + self.assertEqual( + { + "query": 'age > 30 AND status = "active" AND history != "Test Flow" AND flow = ""', + "total": 100, + "send_time": 10.0, + "warnings": [], + "blockers": [], + }, + response.json(), + ) + + # try with a bad query + mr_mocks.exception(mailroom.QueryValidationException("mismatched input at (((", "syntax")) + + response = self.client.post( + preview_url, + { + "query": "(((", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + self.assertEqual(400, response.status_code) + self.assertEqual({"query": "", "total": 0, "error": "Invalid query syntax."}, response.json()) + + # suspended orgs should block 
+ self.org.suspend() + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post(preview_url, {"query": "age > 30"}, content_type="application/json") + self.assertEqual( + [ + "Sorry, your workspace is currently suspended. To re-enable starting flows and sending messages, please contact support." + ], + response.json()["blockers"], + ) + + # flagged orgs should block + self.org.unsuspend() + self.org.flag() + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post(preview_url, {"query": "age > 30"}, content_type="application/json") + self.assertEqual( + [ + "Sorry, your workspace is currently flagged. To re-enable starting flows and sending messages, please contact support." + ], + response.json()["blockers"], + ) + + self.org.unflag() + + # create a pending flow start to test warning + FlowStart.create(flow, self.admin, query="age > 30") + + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + + self.assertEqual( + [ + "A flow is already starting. To avoid confusion, make sure you are not targeting the same contacts before continuing." + ], + response.json()["warnings"], + ) + + ivr_flow = self.create_flow("IVR Test", flow_type=Flow.TYPE_VOICE) + + preview_url = reverse("flows.flow_preview_start", args=[ivr_flow.id]) + + # shouldn't be able to since we don't have a call channel + self.org.flow_starts.all().delete() + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "IVR Test"', total=100) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["blockers"][0], + 'To start this flow you need to add a voice channel to your workspace which will allow you to make and receive calls.', + ) + + # if we have too many messages in our outbox we should block + self.org.counts.create(scope=f"msgs:folder:{SystemLabel.TYPE_OUTBOX}", count=1_000_001) + preview_url = reverse("flows.flow_preview_start", args=[flow.id]) + mr_mocks.flow_start_preview(query="age > 30", total=1000) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + }, + content_type="application/json", + ) + self.assertEqual( + [ + "You have too many messages queued in your outbox. Please wait for these messages to send and then try again." 
+ ], + response.json()["blockers"], + ) + self.org.counts.prefix("msgs:folder:").delete() + + # check warning for lots of contacts + preview_url = reverse("flows.flow_preview_start", args=[flow.id]) + + with override_settings(SEND_HOURS_WARNING=24, SEND_HOURS_BLOCK=48): + # we send at 10 tps, so make the total take 24 hours + expected_tps = 10 + mr_mocks.flow_start_preview( + query='age > 30 AND status = "active" AND history != "Test Flow"', total=24 * 60 * 60 * expected_tps + ) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["warnings"][0], + "Your channels will likely take over a day to reach all of the selected contacts. Consider selecting fewer contacts before continuing.", + ) + + # now really long so it should block + mr_mocks.flow_start_preview( + query='age > 30 AND status = "active" AND history != "Test Flow"', total=3 * 24 * 60 * 60 * expected_tps + ) + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["blockers"][0], + "Your channels cannot send fast enough to reach all of the selected contacts in a reasonable time. Select fewer contacts to continue.", + ) + + # if we release our send channel we also can't start a regular messaging flow + self.channel.release(self.admin) + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Test Flow"', total=100) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True}, + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["blockers"][0], + 'To start this flow you need to add a channel to your workspace which will allow you to send messages to your contacts.', + ) + + flow = self.create_flow("Background Flow", flow_type=Flow.TYPE_BACKGROUND) + mr_mocks.flow_start_preview(query='age > 30 AND status = "active" AND history != "Background Flow"', total=100) + preview_url = reverse("flows.flow_preview_start", args=[flow.id]) + + self.login(self.editor) + + response = self.client.post( + preview_url, + { + "query": "age > 30", + "exclusions": {"non_active": True, "started_previously": True, "in_a_flow": True}, + }, + content_type="application/json", + ) + self.assertEqual( + { + "query": 'age > 30 AND status = "active" AND history != "Background Flow"', + "total": 100, + "send_time": 0.0, + "warnings": [], + "blockers": [], + }, + response.json(), + ) + + @mock_mailroom + def test_template_warnings(self, mr_mocks): + self.login(self.admin) + flow = self.get_flow("whatsapp_template") + + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + # no warning, we don't have a whatsapp channel + self.assertEqual(response.json()["warnings"], []) + + # change our channel to use a whatsapp scheme + self.channel.schemes =
[URN.WHATSAPP_SCHEME] + self.channel.channel_type = "TWA" + self.channel.save() + + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + # no warning, we don't have a whatsapp channel that requires a message template + self.assertEqual(response.json()["warnings"], []) + + self.channel.channel_type = "WA" + self.channel.save() + + # clear dependencies, this will cause our flow to look like it isn't using templates + metadata = flow.metadata + flow.metadata = {} + flow.save(update_fields=["metadata"]) + + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["warnings"], + [ + "This flow does not use message templates. You may still start this flow but WhatsApp contacts who have not sent an incoming message in the last 24 hours may not receive it." + ], + ) + + # restore our dependency + flow.metadata = metadata + flow.save(update_fields=["metadata"]) + + # template doesn't exist, will be warned + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["warnings"], + ["The message template affirmation does not exist on your account and cannot be sent."], + ) + + # create the template, but no translations + template = self.create_template("affirmation", [], uuid="f712e05c-bbed-40f1-b3d9-671bb9b60775") + + # will be warned again + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["warnings"], ["Your message template affirmation is not approved and cannot be sent."] + ) + + # create a translation, but rejected rather than approved + TemplateTranslation.objects.create( + template=template, + channel=self.channel, + locale="eng-US", + status=TemplateTranslation.STATUS_REJECTED, + external_id="id1", + external_locale="en_US", + namespace="foo_namespace", + components=[{"name": "body", "type": "body/text", "content": "Hello", "variables": {}, "params": []}], + variables=[], + ) + + # will be warned again + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + self.assertEqual( + response.json()["warnings"], ["Your message template affirmation is not approved and cannot be sent."] + ) + + # finally, set our translation to approved + TemplateTranslation.objects.update(status=TemplateTranslation.STATUS_APPROVED) + + # no warnings + mr_mocks.flow_start_preview(query="age > 30", total=2) + response = self.client.post( + reverse("flows.flow_preview_start", args=[flow.id]), + { + "query": "age > 30", + }, + content_type="application/json", + ) + + self.assertEqual(response.json()["warnings"], []) + + @mock_mailroom + def test_start(self, mr_mocks): + contact = self.create_contact("Bob", phone="+593979099111") + flow = self.create_flow("Test") + start_url = f"{reverse('flows.flow_start', args=[])}?flow={flow.id}" + +
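# note: the start form is opened with the flow pre-selected via the flow query param +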
self.assertRequestDisallowed(start_url, [None, self.user, self.agent]) + self.assertUpdateFetch(start_url, [self.editor, self.admin], form_fields=["flow", "contact_search"]) + + # create flow start with a query + mr_mocks.contact_parse_query("frank", cleaned='name ~ "frank"') + self.assertUpdateSubmit( + start_url, + self.admin, + {"flow": flow.id, "contact_search": get_contact_search(query="frank")}, + ) + + start = FlowStart.objects.get() + self.assertEqual(flow, start.flow) + self.assertEqual(FlowStart.STATUS_PENDING, start.status) + self.assertEqual({}, start.exclusions) + self.assertEqual('name ~ "frank"', start.query) + + self.assertEqual(1, len(mr_mocks.queued_batch_tasks)) + self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[0]["type"]) + + FlowStart.objects.all().delete() + + # create flow start with a bogus query + mr_mocks.exception(mailroom.QueryValidationException("query contains an error", "syntax")) + self.assertUpdateSubmit( + start_url, + self.admin, + {"flow": flow.id, "contact_search": get_contact_search(query='name = "frank')}, + form_errors={"contact_search": "Invalid query syntax."}, + object_unchanged=flow, + ) + + # try missing contacts + self.assertUpdateSubmit( + start_url, + self.admin, + {"flow": flow.id, "contact_search": get_contact_search(contacts=[])}, + form_errors={"contact_search": "Contacts or groups are required."}, + object_unchanged=flow, + ) + + # try to create with an empty query + self.assertUpdateSubmit( + start_url, + self.admin, + {"flow": flow.id, "contact_search": get_contact_search(query="")}, + form_errors={"contact_search": "A contact query is required."}, + object_unchanged=flow, + ) + + query = f"uuid='{contact.uuid}'" + mr_mocks.contact_parse_query(query, cleaned=query) + + # create flow start with exclude_in_other and exclude_reruns both left unchecked + self.assertUpdateSubmit( + start_url, + self.admin, + {"flow": flow.id, "contact_search": get_contact_search(query=query)}, + ) + + start = FlowStart.objects.get() + + self.assertEqual(query, start.query) + self.assertEqual(flow, start.flow) + self.assertEqual(FlowStart.TYPE_MANUAL, start.start_type) + self.assertEqual(FlowStart.STATUS_PENDING, start.status) + self.assertEqual({}, start.exclusions) + + self.assertEqual(2, len(mr_mocks.queued_batch_tasks)) + self.assertEqual("start_flow", mr_mocks.queued_batch_tasks[1]["type"]) + + FlowStart.objects.all().delete() + + @mock_mailroom + def test_broadcast_background_flow(self, mr_mocks): + flow = self.create_flow("Background", flow_type=Flow.TYPE_BACKGROUND) + + # create flow start with a query + mr_mocks.contact_parse_query("frank", cleaned='name ~ "frank"') + + start_url = f"{reverse('flows.flow_start', args=[])}?flow={flow.id}" + self.assertUpdateSubmit( + start_url, self.admin, {"flow": flow.id, "contact_search": get_contact_search(query="frank")} + ) + + start = FlowStart.objects.get() + self.assertEqual(flow, start.flow) + self.assertEqual(FlowStart.STATUS_PENDING, start.status) + self.assertEqual({}, start.exclusions) + self.assertEqual('name ~ "frank"', start.query) + + def test_copy_view(self): + flow = self.get_flow("color") + + self.login(self.admin) + + response = self.client.post(reverse("flows.flow_copy", args=[flow.id])) + + flow_copy = Flow.objects.get(org=self.org, name="Copy of %s" % flow.name) + + self.assertRedirect(response, reverse("flows.flow_editor", args=[flow_copy.uuid])) + + def test_recent_contacts(self): + flow = self.create_flow("Test") + contact1 = self.create_contact("Bob", phone="0979111111") + 
contact2 = self.create_contact("", phone="0979222222") + node1_exit1_uuid = "805f5073-ce96-4b6a-ab9f-e77dd412f83b" + node2_uuid = "fcc47dc4-306b-4b2f-ad72-7e53f045c3c4" + + seg1_url = reverse("flows.flow_recent_contacts", args=[flow.uuid, node1_exit1_uuid, node2_uuid]) + + # nothing set in redis just means empty list + self.assertRequestDisallowed(seg1_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(seg1_url, [self.user, self.editor, self.admin]) + self.assertEqual([], response.json()) + + def add_recent_contact(exit_uuid: str, dest_uuid: str, contact, text: str, ts: float): + r = get_redis_connection() + member = f"{uuid4()}|{contact.id}|{text}" # text is prefixed with a random value to keep it unique + r.zadd(f"recent_contacts:{exit_uuid}:{dest_uuid}", mapping={member: ts}) + + add_recent_contact(node1_exit1_uuid, node2_uuid, contact1, "Hi there", 1639338554.969123) + add_recent_contact(node1_exit1_uuid, node2_uuid, contact2, "|x|", 1639338555.234567) + add_recent_contact(node1_exit1_uuid, node2_uuid, contact1, "Sounds good", 1639338561.345678) + + response = self.assertReadFetch(seg1_url, [self.user, self.editor, self.admin]) + self.assertEqual( + [ + { + "contact": {"uuid": str(contact1.uuid), "name": "Bob"}, + "operand": "Sounds good", + "time": "2021-12-12T19:49:21.345678+00:00", + }, + { + "contact": {"uuid": str(contact2.uuid), "name": "0979 222 222"}, + "operand": "|x|", + "time": "2021-12-12T19:49:15.234567+00:00", + }, + { + "contact": {"uuid": str(contact1.uuid), "name": "Bob"}, + "operand": "Hi there", + "time": "2021-12-12T19:49:14.969123+00:00", + }, + ], + response.json(), + ) + + def test_category_counts(self): + flow1 = self.create_flow("Test 1") + + counts_url = reverse("flows.flow_category_counts", args=[flow1.id]) + + self.assertRequestDisallowed(counts_url, [None, self.agent]) + + # check with no data + response = self.assertReadFetch(counts_url, [self.user, self.editor, self.admin]) + self.assertEqual({"counts": []}, response.json()) + + # simulate some category data + flow1.metadata["results"] = [{"key": "color", "name": "Color"}, {"key": "beer", "name": "Beer"}] + flow1.save(update_fields=("metadata",)) + + flow1.category_counts.create( + node_uuid="9b00751c-0d46-4e5f-86b1-7ccfae76ea10", + result_key="color", + result_name="Color", + category_name="Red", + count=3, + ) + flow1.category_counts.create( + node_uuid="9b00751c-0d46-4e5f-86b1-7ccfae76ea10", + result_key="color", + result_name="Color", + category_name="Blue", + count=2, + ) + flow1.category_counts.create( + node_uuid="9b00751c-0d46-4e5f-86b1-7ccfae76ea10", + result_key="color", + result_name="Color", + category_name="Other", + count=1, + ) + flow1.category_counts.create( + node_uuid="300fd49b-c69d-4e8c-aba9-b6036d0b83d9", + result_key="beer", + result_name="Beer", + category_name="Primus", + count=7, + ) + + response = self.assertReadFetch(counts_url, [self.user, self.editor, self.admin]) + self.assertEqual( + { + "counts": [ + { + "key": "color", + "name": "Color", + "categories": [ + {"name": "Blue", "count": 2, "pct": 0.3333333333333333}, + {"name": "Other", "count": 1, "pct": 0.16666666666666666}, + {"name": "Red", "count": 3, "pct": 0.5}, + ], + "total": 6, + }, + { + "key": "beer", + "name": "Beer", + "categories": [ + {"name": "Primus", "count": 7, "pct": 1.0}, + ], + "total": 7, + }, + ] + }, + response.json(), + ) + + def test_results(self): + flow = self.create_flow("Test 1") + + results_url = reverse("flows.flow_results", args=[flow.id]) + + 
self.assertRequestDisallowed(results_url, [None, self.agent]) + self.assertReadFetch(results_url, [self.user, self.editor, self.admin]) + + flow.release(self.admin) + + response = self.requestView(results_url, self.admin) + self.assertEqual(404, response.status_code) + + @patch("django.utils.timezone.now") + def test_engagement(self, mock_now): + # this test runs as if it's 2024-11-25 12:05:00 + mock_now.return_value = datetime(2024, 11, 25, 12, 5, 0, tzinfo=tzone.utc) + + flow1 = self.create_flow("Test 1") + + engagement_url = reverse("flows.flow_engagement", args=[flow1.id]) + + # check fetching as template + self.assertRequestDisallowed(engagement_url, [None, self.agent]) + self.assertReadFetch(engagement_url, [self.user, self.editor, self.admin]) + + # check fetching as chart data (when there's no data) + response = self.requestView(engagement_url, self.admin, HTTP_ACCEPT="application/json") + self.assertEqual( + { + "timeline": { + "data": [], + "xmin": 1729900800000, # 2024-10-26 + "xmax": 1732492800000, # 2024-11-25 + "ymax": 0, + }, + "dow": { + "data": [ + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + ] + }, + "hod": {"data": [[i, 0] for i in range(24)]}, + "completion": { + "summary": [ + {"name": "Active", "y": 0, "drilldown": None, "color": "#2387CA"}, + {"name": "Completed", "y": 0, "drilldown": None, "color": "#8FC93A"}, + { + "name": "Interrupted, Expired and Failed", + "y": 0, + "drilldown": "incomplete", + "color": "#CCC", + }, + ], + "drilldown": [ + { + "name": "Interrupted, Expired and Failed", + "id": "incomplete", + "innerSize": "50%", + "data": [ + {"name": "Expired", "y": 0, "color": "#CCC"}, + {"name": "Interrupted", "y": 0, "color": "#EEE"}, + {"name": "Failed", "y": 0, "color": "#FEE"}, + ], + } + ], + }, + }, + response.json(), + ) + + def engagement(flow, when, count): + flow.counts.create(scope=f"msgsin:hour:{when.hour}", count=count) + flow.counts.create(scope=f"msgsin:dow:{when.isoweekday()}", count=count) + flow.counts.create(scope=f"msgsin:date:{when.date().isoformat()}", count=count) + + engagement(flow1, datetime(2024, 11, 24, 9, 0, 0, tzinfo=tzone.utc), 3) # 2024-11-24 09:00 (Sun) + engagement(flow1, datetime(2024, 11, 25, 12, 0, 0, tzinfo=tzone.utc), 2) # 2024-11-25 12:00 (Mon) + engagement(flow1, datetime(2024, 11, 26, 9, 0, 0, tzinfo=tzone.utc), 4) # 2024-11-26 09:00 (Tue) + engagement(flow1, datetime(2024, 11, 26, 23, 0, 0, tzinfo=tzone.utc), 1) # 2024-11-26 23:00 (Tue) + + flow1.counts.create(scope="status:W", count=4) + flow1.counts.create(scope="status:C", count=3) + flow1.counts.create(scope="status:X", count=2) + flow1.counts.create(scope="status:I", count=1) + + response = self.requestView(engagement_url, self.admin, HTTP_ACCEPT="application/json") + self.assertEqual( + { + "timeline": { + "data": [[1732406400000, 3], [1732492800000, 2], [1732579200000, 5]], + "xmin": 1729900800000, # 2024-10-26 + "xmax": 1732492800000, + "ymax": 5, + }, + "dow": { + "data": [ + {"msgs": 3, "y": 30.0}, + {"msgs": 2, "y": 20.0}, + {"msgs": 5, "y": 50.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + {"msgs": 0, "y": 0.0}, + ] + }, + "hod": { + "data": [ + [0, 0], + [1, 1], # 23:00 UTC is 01:00 in Kigali + [2, 0], + [3, 0], + [4, 0], + [5, 0], + [6, 0], + [7, 0], + [8, 0], + [9, 0], + [10, 0], + [11, 7], + [12, 0], + [13, 0], + [14, 2], + [15, 0], + [16, 0], + [17, 0], + [18, 0], + [19, 0], + [20, 0], + 
[21, 0], + [22, 0], + [23, 0], + ] + }, + "completion": { + "summary": [ + {"name": "Active", "y": 4, "drilldown": None, "color": "#2387CA"}, + {"name": "Completed", "y": 3, "drilldown": None, "color": "#8FC93A"}, + { + "name": "Interrupted, Expired and Failed", + "y": 3, + "drilldown": "incomplete", + "color": "#CCC", + }, + ], + "drilldown": [ + { + "name": "Interrupted, Expired and Failed", + "id": "incomplete", + "innerSize": "50%", + "data": [ + {"name": "Expired", "y": 2, "color": "#CCC"}, + {"name": "Interrupted", "y": 1, "color": "#EEE"}, + {"name": "Failed", "y": 0, "color": "#FEE"}, + ], + } + ], + }, + }, + response.json(), + ) + + # simulate having some data from 6 months ago + engagement(flow1, datetime(2024, 5, 1, 12, 0, 0, tzinfo=tzone.utc), 4) # 2024-05-01 12:00 (Wed) + + response = self.requestView(engagement_url, self.admin, HTTP_ACCEPT="application/json") + resp_json = response.json() + self.assertEqual(1714521600000, resp_json["timeline"]["xmin"]) # 2024-05-01 + self.assertEqual( + [[1714521600000, 4], [1732406400000, 3], [1732492800000, 2], [1732579200000, 5]], + resp_json["timeline"]["data"], + ) + + # simulate having some data from 18 months ago (should trigger bucketing by week) + engagement(flow1, datetime(2023, 5, 1, 12, 0, 0, tzinfo=tzone.utc), 3) # 2023-05-01 12:00 (Mon) + + response = self.requestView(engagement_url, self.admin, HTTP_ACCEPT="application/json") + resp_json = response.json() + self.assertEqual(1682899200000, resp_json["timeline"]["xmin"]) # 2023-05-01 + self.assertEqual( + [ + [1682899200000, 3], # 2023-05-01 (Mon) + [1714348800000, 4], # 2024-04-29 (Mon) + [1731888000000, 3], # 2024-11-18 (Mon) + [1732492800000, 7], # 2024-11-25 (Mon) + ], + resp_json["timeline"]["data"], + ) + + # simulate having some data from 4 years ago (should trigger bucketing by month) + engagement(flow1, datetime(2020, 11, 25, 12, 0, 0, tzinfo=tzone.utc), 6) # 2020-11-25 12:00 (Wed) + + response = self.requestView(engagement_url, self.admin, HTTP_ACCEPT="application/json") + resp_json = response.json() + self.assertEqual(1606262400000, resp_json["timeline"]["xmin"]) # 2020-11-25 + self.assertEqual( + [ + [1604188800000, 6], # 2020-11-01 + [1682899200000, 3], # 2023-05-01 + [1714521600000, 4], # 2024-05-01 + [1730419200000, 10], # 2024-11-01 + ], + resp_json["timeline"]["data"], + ) + + # check 404 for inactive flow + flow1.release(self.admin) + + response = self.requestView(engagement_url, self.admin) + self.assertEqual(404, response.status_code) + + def test_activity(self): + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + + flow1.counts.create(scope="node:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=4) + flow1.counts.create(scope="node:400d6b5e-c963-42a1-a06c-50bb9b1e38b1", count=5) + + flow1.counts.create( + scope="segment:1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=3 + ) + flow1.counts.create( + scope="segment:1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a", count=4 + ) + flow1.counts.create( + scope="segment:6f607948-f3f0-4a6a-94b8-7fdd877895ca:400d6b5e-c963-42a1-a06c-50bb9b1e38b1", count=5 + ) + flow2.counts.create( + scope="segment:a4fe3ada-b062-47e4-be58-bcbe1bca31b4:74a53ff4-fe63-4d89-875e-cae3caca177c", count=6 + ) + + activity_url = reverse("flows.flow_activity", args=[flow1.uuid]) + + self.assertRequestDisallowed(activity_url, [None, self.agent]) + + response = self.assertReadFetch(activity_url, [self.user, self.editor, self.admin]) + self.assertEqual( + { + "nodes": 
{"01c175da-d23d-40a4-a845-c4a9bb4b481a": 4, "400d6b5e-c963-42a1-a06c-50bb9b1e38b1": 5}, + "segments": { + "1fff74f4-c81f-4f4c-a03d-58d113c17da1:01c175da-d23d-40a4-a845-c4a9bb4b481a": 7, + "6f607948-f3f0-4a6a-94b8-7fdd877895ca:400d6b5e-c963-42a1-a06c-50bb9b1e38b1": 5, + }, + }, + response.json(), + ) + + def test_write_protection(self): + flow = self.get_flow("favorites_v13") + flow_json = flow.get_definition() + flow_json_copy = flow_json.copy() + + self.assertEqual(1, flow_json["revision"]) + + self.login(self.admin) + + # saving should work + flow.save_revision(self.admin, flow_json) + + self.assertEqual(2, flow_json["revision"]) + + # we can't save with older revision number + with self.assertRaises(FlowUserConflictException): + flow.save_revision(self.admin, flow_json_copy) + + # make flow definition invalid by creating a duplicate node UUID + mode0_uuid = flow_json["nodes"][0]["uuid"] + flow_json["nodes"][1]["uuid"] = mode0_uuid + + with self.assertRaises(mailroom.FlowValidationException) as cm: + flow.save_revision(self.admin, flow_json) + + self.assertEqual(f"node UUID {mode0_uuid} isn't unique", str(cm.exception)) + + # check view converts exception to error response + response = self.client.post( + reverse("flows.flow_revisions", args=[flow.uuid]), data=flow_json, content_type="application/json" + ) + + self.assertEqual(400, response.status_code) + self.assertEqual( + { + "status": "failure", + "description": "Your flow failed validation. Please refresh your browser.", + "detail": f"node UUID {mode0_uuid} isn't unique", + }, + response.json(), + ) + + def test_change_language(self): + self.org.set_flow_languages(self.admin, ["eng", "spa", "ara"]) + + flow = self.get_flow("favorites_v13") + + change_url = reverse("flows.flow_change_language", args=[flow.id]) + + self.assertUpdateSubmit( + change_url, + self.admin, + {"language": ""}, + form_errors={"language": "This field is required."}, + object_unchanged=flow, + ) + + self.assertUpdateSubmit( + change_url, + self.admin, + {"language": "fra"}, + form_errors={"language": "Not a valid language."}, + object_unchanged=flow, + ) + + self.assertUpdateSubmit(change_url, self.admin, {"language": "spa"}, success_status=302) + + flow_def = flow.get_definition() + self.assertIn("eng", flow_def["localization"]) + self.assertEqual("¿Cuál es tu color favorito?", flow_def["nodes"][0]["actions"][0]["text"]) + + def test_export_results(self): + export_url = reverse("flows.flow_export_results") + + flow1 = self.create_flow("Test 1") + flow2 = self.create_flow("Test 2") + testers = self.create_group("Testers", contacts=[]) + gender = self.create_field("gender", "Gender") + + self.assertRequestDisallowed(export_url, [None, self.agent]) + response = self.assertUpdateFetch( + export_url + f"?ids={flow1.id},{flow2.id}", + [self.user, self.editor, self.admin], + form_fields=( + "start_date", + "end_date", + "with_fields", + "with_groups", + "flows", + "extra_urns", + "responded_only", + ), + ) + self.assertNotContains(response, "already an export in progress") + + # anon orgs don't see urns option + with self.anonymous(self.org): + response = self.client.get(export_url) + self.assertEqual( + ["start_date", "end_date", "with_fields", "with_groups", "flows", "responded_only", "loc"], + list(response.context["form"].fields.keys()), + ) + + # create a dummy export task so that we won't be able to export + blocking_export = ResultsExport.create( + self.org, self.admin, start_date=date.today() - timedelta(days=7), end_date=date.today() + ) + + response = 
self.client.get(export_url) + self.assertContains(response, "already an export in progress") + + # check we can't submit if another export is already in progress (e.g. the form was opened before it started) + response = self.client.post( + export_url, {"start_date": "2022-06-28", "end_date": "2022-09-28", "flows": [flow1.id]} + ) + self.assertContains(response, "already an export in progress") + self.assertEqual(1, Export.objects.count()) + + # mark that one as finished so it's no longer a blocker + blocking_export.status = Export.STATUS_COMPLETE + blocking_export.save(update_fields=("status",)) + + # try to submit with no values + response = self.client.post(export_url, {}) + self.assertFormError(response.context["form"], "start_date", "This field is required.") + self.assertFormError(response.context["form"], "end_date", "This field is required.") + self.assertFormError(response.context["form"], "flows", "This field is required.") + + response = self.client.post( + export_url, + { + "start_date": "2022-06-28", + "end_date": "2022-09-28", + "flows": [flow1.id], + "with_groups": [testers.id], + "with_fields": [gender.id], + }, + ) + self.assertEqual(200, response.status_code) + + export = Export.objects.exclude(id=blocking_export.id).get() + self.assertEqual("results", export.export_type) + self.assertEqual(date(2022, 6, 28), export.start_date) + self.assertEqual(date(2022, 9, 28), export.end_date) + self.assertEqual( + { + "flow_ids": [flow1.id], + "with_groups": [testers.id], + "with_fields": [gender.id], + "extra_urns": [], + "responded_only": False, + }, + export.config, + ) + + def test_export_and_download_translation(self): + self.org.set_flow_languages(self.admin, ["spa"]) + + flow = self.get_flow("favorites") + export_url = reverse("flows.flow_export_translation", args=[flow.id]) + + self.assertRequestDisallowed(export_url, [None, self.agent, self.admin2]) + self.assertUpdateFetch(export_url, [self.user, self.editor, self.admin], form_fields=["language"]) + + # submit with no language + response = self.assertUpdateSubmit(export_url, self.admin, {}, success_status=200) + + download_url = response["X-Temba-Success"] + self.assertEqual(f"/flow/download_translation/?flow={flow.id}&language=", download_url) + + # check fetching the PO from the download link + with patch("temba.mailroom.client.client.MailroomClient.po_export") as mock_po_export: + mock_po_export.return_value = b'msgid "Red"\nmsgstr "Roja"\n\n' + self.assertRequestDisallowed(download_url, [None, self.agent, self.admin2]) + response = self.assertReadFetch(download_url, [self.user, self.editor, self.admin]) + + self.assertEqual(b'msgid "Red"\nmsgstr "Roja"\n\n', response.content) + self.assertEqual('attachment; filename="favorites.po"', response["Content-Disposition"]) + self.assertEqual("text/x-gettext-translation", response["Content-Type"]) + + # submit with a language + response = self.assertUpdateSubmit(export_url, self.admin, {"language": "spa"}, success_status=200) + + download_url = response["X-Temba-Success"] + self.assertEqual(f"/flow/download_translation/?flow={flow.id}&language=spa", download_url) + + # check fetching the PO from the download link + with patch("temba.mailroom.client.client.MailroomClient.po_export") as mock_po_export: + mock_po_export.return_value = b'msgid "Red"\nmsgstr "Roja"\n\n' + response = self.requestView(download_url, self.admin) + + # filename includes language now + self.assertEqual('attachment; filename="favorites.spa.po"', response["Content-Disposition"]) + + def test_import_translation(self):
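+ # importing a translation is a two-step wizard: upload the PO file, then confirm its language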
+ self.org.set_flow_languages(self.admin, ["eng", "spa"]) + + flow = self.get_flow("favorites_v13") + step1_url = reverse("flows.flow_import_translation", args=[flow.id]) + + # check step 1 is just a file upload + self.assertRequestDisallowed(step1_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch(step1_url, [self.editor, self.admin], form_fields=["po_file"]) + + # submit with no file + self.assertUpdateSubmit( + step1_url, self.admin, {}, form_errors={"po_file": "This field is required."}, object_unchanged=flow + ) + + # submit with something that's empty + response = self.requestView(step1_url, self.admin, post_data={"po_file": io.BytesIO(b"")}) + self.assertFormError(response.context["form"], "po_file", "The submitted file is empty.") + + # submit with something that's not a valid PO file + response = self.requestView(step1_url, self.admin, post_data={"po_file": io.BytesIO(b"msgid")}) + self.assertFormError(response.context["form"], "po_file", "File doesn't appear to be a valid PO file.") + + # submit with something that's in the base language of the flow + po_file = io.BytesIO( + b""" +#, fuzzy +msgid "" +msgstr "" +"POT-Creation-Date: 2018-07-06 12:30+0000\\n" +"Language: en\\n" +"Language-3: eng\\n" + +msgid "Blue" +msgstr "Bluuu" + """ + ) + response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file}) + self.assertFormError( + response.context["form"], + "po_file", + "Contains translations in English which is the base language of this flow.", + ) + + # submit with something that's in a language which isn't a supported translation language + po_file = io.BytesIO( + b""" +#, fuzzy +msgid "" +msgstr "" +"POT-Creation-Date: 2018-07-06 12:30+0000\\n" +"Language: fr\\n" +"Language-3: fra\\n" + +msgid "Blue" +msgstr "Bleu" + """ + ) + response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file}) + self.assertFormError( + response.context["form"], + "po_file", + "Contains translations in French which is not a supported translation language.", + ) + + # submit with something that doesn't have an explicit language + po_file = io.BytesIO( + b""" +msgid "Blue" +msgstr "Azul" + """ + ) + response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file}) + + self.assertEqual(302, response.status_code) + self.assertIn(f"/flow/import_translation/{flow.id}/?po=", response.url) + + response = self.assertUpdateFetch(response.url, [self.admin], form_fields=["language"]) + self.assertContains(response, "Unknown") + + # submit a different PO that does have language set + po_file = io.BytesIO( + b""" +#, fuzzy +msgid "" +msgstr "" +"POT-Creation-Date: 2018-07-06 12:30+0000\\n" +"Language: es\\n" +"MIME-Version: 1.0\\n" +"Content-Type: text/plain; charset=UTF-8\\n" +"Language-3: spa\\n" + +#: Favorites/8720f157-ca1c-432f-9c0b-2014ddc77094/name:0 +#: Favorites/a4d15ed4-5b24-407f-b86e-4b881f09a186/arguments:0 +msgid "Blue" +msgstr "Azul" +""" + ) + response = self.requestView(step1_url, self.admin, post_data={"po_file": po_file}) + + self.assertEqual(302, response.status_code) + self.assertIn(f"/flow/import_translation/{flow.id}/?po=", response.url) + + step2_url = response.url + + response = self.assertUpdateFetch(step2_url, [self.admin], form_fields=["language"]) + self.assertContains(response, "Spanish (spa)") + self.assertEqual({"language": "spa"}, response.context["form"].initial) + + # confirm the import + with patch("temba.mailroom.client.client.MailroomClient.po_import") as mock_po_import: + mock_po_import.return_value = {"flows":
[flow.get_definition()]} + + response = self.requestView(step2_url, self.admin, post_data={"language": "spa"}) + + # should redirect back to editor + self.assertEqual(302, response.status_code) + self.assertEqual(f"/flow/editor/{flow.uuid}/", response.url) + + # should have a new revision + self.assertEqual(2, flow.revisions.count()) diff --git a/temba/flows/tests/test_label.py b/temba/flows/tests/test_label.py new file mode 100644 index 00000000000..0bb05c622d0 --- /dev/null +++ b/temba/flows/tests/test_label.py @@ -0,0 +1,25 @@ +from temba.flows.models import FlowLabel +from temba.tests import TembaTest + + +class FlowLabelTest(TembaTest): + def test_model(self): + label = FlowLabel.create(self.org, self.admin, "Cool Flows") + self.assertEqual("Cool Flows", label.name) + + # can't create with invalid name + with self.assertRaises(AssertionError): + FlowLabel.create(self.org, self.admin, '"Cool"') + + # can't create with duplicate name + with self.assertRaises(AssertionError): + FlowLabel.create(self.org, self.admin, "Cool Flows") + + flow1 = self.create_flow("Flow 1") + flow2 = self.create_flow("Flow 2") + + label.toggle_label([flow1, flow2], add=True) + self.assertEqual({flow1, flow2}, set(label.get_flows())) + + label.toggle_label([flow1], add=False) + self.assertEqual({flow2}, set(label.get_flows())) diff --git a/temba/flows/tests/test_labelcrudl.py b/temba/flows/tests/test_labelcrudl.py new file mode 100644 index 00000000000..ddb41db41f0 --- /dev/null +++ b/temba/flows/tests/test_labelcrudl.py @@ -0,0 +1,72 @@ +from django.urls import reverse + +from temba.flows.models import FlowLabel +from temba.tests import CRUDLTestMixin, TembaTest + + +class FlowLabelCRUDLTest(TembaTest, CRUDLTestMixin): + def test_create(self): + create_url = reverse("flows.flowlabel_create") + + self.assertRequestDisallowed(create_url, [None, self.user, self.agent]) + self.assertCreateFetch(create_url, [self.editor, self.admin], form_fields=("name", "flows")) + + # try to submit without a name + self.assertCreateSubmit(create_url, self.admin, {}, form_errors={"name": "This field is required."}) + + # try to submit with an invalid name + self.assertCreateSubmit( + create_url, self.admin, {"name": '"Cool"\\'}, form_errors={"name": 'Cannot contain the character: "'} + ) + + self.assertCreateSubmit( + create_url, + self.admin, + {"name": "Cool Flows"}, + new_obj_query=FlowLabel.objects.filter(org=self.org, name="Cool Flows"), + ) + + # try to create with a name that's already used + self.assertCreateSubmit(create_url, self.admin, {"name": "Cool Flows"}, form_errors={"name": "Must be unique."}) + + def test_update(self): + label = FlowLabel.create(self.org, self.admin, "Cool Flows") + FlowLabel.create(self.org, self.admin, "Crazy Flows") + + update_url = reverse("flows.flowlabel_update", args=[label.id]) + + self.assertRequestDisallowed(update_url, [None, self.user, self.agent, self.admin2]) + self.assertUpdateFetch(update_url, [self.editor, self.admin], form_fields=("name", "flows")) + + # try to update to an invalid name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": '"Cool"\\'}, + form_errors={"name": 'Cannot contain the character: "'}, + object_unchanged=label, + ) + + # try to update to a non-unique name + self.assertUpdateSubmit( + update_url, + self.admin, + {"name": "Crazy Flows"}, + form_errors={"name": "Must be unique."}, + object_unchanged=label, + ) + + self.assertUpdateSubmit(update_url, self.admin, {"name": "Super Cool Flows"}) + + label.refresh_from_db() + self.assertEqual("Super 
Cool Flows", label.name) + + def test_delete(self): + label = FlowLabel.create(self.org, self.admin, "Cool Flows") + + delete_url = reverse("flows.flowlabel_delete", args=[label.id]) + + self.assertRequestDisallowed(delete_url, [None, self.user, self.agent, self.admin2]) + + self.assertDeleteFetch(delete_url, [self.editor, self.admin]) + self.assertDeleteSubmit(delete_url, self.admin, object_deleted=label, success_status=200) diff --git a/temba/flows/tests/test_misc.py b/temba/flows/tests/test_misc.py new file mode 100644 index 00000000000..6e227d2b5ed --- /dev/null +++ b/temba/flows/tests/test_misc.py @@ -0,0 +1,10 @@ +from temba.tests import TembaTest + + +class AssetServerTest(TembaTest): + def test_languages(self): + self.login(self.admin) + response = self.client.get("/flow/assets/%d/1234/language/" % self.org.id) + self.assertEqual( + response.json(), {"results": [{"iso": "eng", "name": "English"}, {"iso": "kin", "name": "Kinyarwanda"}]} + ) diff --git a/temba/flows/tests/test_revision.py b/temba/flows/tests/test_revision.py new file mode 100644 index 00000000000..6cfc21dc9b6 --- /dev/null +++ b/temba/flows/tests/test_revision.py @@ -0,0 +1,96 @@ +from datetime import timedelta + +from django.db.models.functions import TruncDate +from django.utils import timezone + +from temba.flows.models import FlowRevision +from temba.flows.tasks import trim_flow_revisions +from temba.tests import TembaTest + + +class FlowRevisionTest(TembaTest): + def test_validate_legacy_definition(self): + def validate(flow_def: dict, expected_error: str): + with self.assertRaises(ValueError) as cm: + FlowRevision.validate_legacy_definition(flow_def) + self.assertEqual(expected_error, str(cm.exception)) + + validate({"flow_type": "U", "nodes": []}, "unsupported flow type") + validate(self.load_json("test_flows/legacy/invalid/not_fully_localized.json"), "non-localized flow definition") + + # base_language of null, but spec version 8 + validate(self.load_json("test_flows/legacy/invalid/no_base_language_v8.json"), "non-localized flow definition") + + # base_language of 'eng' but non localized actions + validate( + self.load_json("test_flows/legacy/invalid/non_localized_with_language.json"), + "non-localized flow definition", + ) + + validate( + self.load_json("test_flows/legacy/invalid/non_localized_ruleset.json"), "non-localized flow definition" + ) + + def test_trim_revisions(self): + start = timezone.now() + + flow1 = self.create_flow("Flow 1") + flow2 = self.create_flow("Flow 2") + + revision = 100 + FlowRevision.objects.all().update(revision=revision) + + # create a single old clinic revision + FlowRevision.objects.create( + flow=flow2, + definition=dict(), + revision=99, + created_on=timezone.now() - timedelta(days=7), + created_by=self.admin, + ) + + # make a bunch of revisions for flow 1 on the same day + created = timezone.now().replace(hour=6) - timedelta(days=1) + for i in range(25): + revision -= 1 + created = created - timedelta(minutes=1) + FlowRevision.objects.create( + flow=flow1, definition=dict(), revision=revision, created_by=self.admin, created_on=created + ) + + # then for 5 days prior, make a few more + for i in range(5): + created = created - timedelta(days=1) + for i in range(10): + revision -= 1 + created = created - timedelta(minutes=1) + FlowRevision.objects.create( + flow=flow1, definition=dict(), revision=revision, created_by=self.admin, created_on=created + ) + + # trim our flow revisions, should be left with original (today), 25 from yesterday, 1 per day for 5 days = 31 + 
self.assertEqual(76, FlowRevision.objects.filter(flow=flow1).count()) + self.assertEqual(45, FlowRevision.trim(start)) + self.assertEqual(31, FlowRevision.objects.filter(flow=flow1).count()) + self.assertEqual( + 7, + FlowRevision.objects.filter(flow=flow1) + .annotate(created_date=TruncDate("created_on")) + .distinct("created_date") + .count(), + ) + + # trim our second flow manually, should remain unchanged + self.assertEqual(2, FlowRevision.objects.filter(flow=flow2).count()) + self.assertEqual(0, FlowRevision.trim_for_flow(flow2.id)) + self.assertEqual(2, FlowRevision.objects.filter(flow=flow2).count()) + + # call our task + trim_flow_revisions() + self.assertEqual(2, FlowRevision.objects.filter(flow=flow2).count()) + self.assertEqual(31, FlowRevision.objects.filter(flow=flow1).count()) + + # call again (tests reading the redis key) + trim_flow_revisions() + self.assertEqual(2, FlowRevision.objects.filter(flow=flow2).count()) + self.assertEqual(31, FlowRevision.objects.filter(flow=flow1).count()) diff --git a/temba/flows/tests/test_run.py b/temba/flows/tests/test_run.py new file mode 100644 index 00000000000..24df2c75f93 --- /dev/null +++ b/temba/flows/tests/test_run.py @@ -0,0 +1,370 @@ +from datetime import datetime, timedelta, timezone as tzone +from unittest.mock import patch +from uuid import UUID + +from django.utils import timezone + +from temba.flows.models import FlowRun, FlowSession, FlowStart, FlowStartCount +from temba.tests import TembaTest, matchers +from temba.tests.engine import MockSessionWriter +from temba.utils.uuid import uuid4 + + +class FlowRunTest(TembaTest): + def setUp(self): + super().setUp() + + self.contact = self.create_contact("Ben Haggerty", phone="+250788123123") + + def test_get_path(self): + flow = self.create_flow("Test") + session = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=self.contact, + status=FlowSession.STATUS_COMPLETED, + output_url="http://sessions.com/123.json", + ended_on=timezone.now(), + wait_resume_on_expire=False, + ) + + # create run with old style path JSON + run = FlowRun.objects.create( + uuid=uuid4(), + org=self.org, + session=session, + flow=flow, + contact=self.contact, + status=FlowRun.STATUS_WAITING, + path=[ + { + "uuid": "b5c3421c-3bbb-4dc7-9bda-683456588a6d", + "node_uuid": "857a1498-3d5f-40f5-8185-2ce596ce2677", + "arrived_on": "2021-12-20T08:47:30.123Z", + "exit_uuid": "6fc14d2c-3b4d-49c7-b342-4b2b2ebf7678", + }, + { + "uuid": "4a254612-8437-47e1-b7bd-feb97ee60bf6", + "node_uuid": "59d992c6-c491-473d-a7e9-4f431d705c01", + "arrived_on": "2021-12-20T08:47:30.234Z", + "exit_uuid": None, + }, + ], + current_node_uuid="59d992c6-c491-473d-a7e9-4f431d705c01", + ) + + self.assertEqual( + [ + FlowRun.Step( + node=UUID("857a1498-3d5f-40f5-8185-2ce596ce2677"), + time=datetime(2021, 12, 20, 8, 47, 30, 123000, tzinfo=tzone.utc), + ), + FlowRun.Step( + node=UUID("59d992c6-c491-473d-a7e9-4f431d705c01"), + time=datetime(2021, 12, 20, 8, 47, 30, 234000, tzinfo=tzone.utc), + ), + ], + run.get_path(), + ) + + # create run with new style path fields + run = FlowRun.objects.create( + uuid=uuid4(), + org=self.org, + session=session, + flow=flow, + contact=self.contact, + status=FlowRun.STATUS_WAITING, + path_nodes=[UUID("857a1498-3d5f-40f5-8185-2ce596ce2677"), UUID("59d992c6-c491-473d-a7e9-4f431d705c01")], + path_times=[ + datetime(2021, 12, 20, 8, 47, 30, 123000, tzinfo=tzone.utc), + datetime(2021, 12, 20, 8, 47, 30, 234000, tzinfo=tzone.utc), + ], + current_node_uuid="59d992c6-c491-473d-a7e9-4f431d705c01", + ) + +
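# the new style fields should yield the same steps as the old style path JSON +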
+        self.assertEqual(
+            [
+                FlowRun.Step(
+                    node=UUID("857a1498-3d5f-40f5-8185-2ce596ce2677"),
+                    time=datetime(2021, 12, 20, 8, 47, 30, 123000, tzinfo=tzone.utc),
+                ),
+                FlowRun.Step(
+                    node=UUID("59d992c6-c491-473d-a7e9-4f431d705c01"),
+                    time=datetime(2021, 12, 20, 8, 47, 30, 234000, tzinfo=tzone.utc),
+                ),
+            ],
+            run.get_path(),
+        )
+
+    def test_as_archive_json(self):
+        flow = self.get_flow("color_v13")
+        flow_nodes = flow.get_definition()["nodes"]
+        color_prompt = flow_nodes[0]
+        color_split = flow_nodes[4]
+        color_other = flow_nodes[3]
+
+        msg_in = self.create_incoming_msg(self.contact, "green")
+
+        run = (
+            MockSessionWriter(self.contact, flow)
+            .visit(color_prompt)
+            .send_msg("What is your favorite color?", self.channel)
+            .visit(color_split)
+            .wait()
+            .resume(msg=msg_in)
+            .set_result("Color", "green", "Other", "green")
+            .visit(color_other)
+            .send_msg("That is a funny color. Try again.", self.channel)
+            .visit(color_split)
+            .wait()
+            .save()
+        ).session.runs.get()
+
+        run_json = run.as_archive_json()
+
+        self.assertEqual(
+            set(run_json.keys()),
+            set(
+                [
+                    "id",
+                    "uuid",
+                    "flow",
+                    "contact",
+                    "responded",
+                    "path",
+                    "values",
+                    "created_on",
+                    "modified_on",
+                    "exited_on",
+                    "exit_type",
+                ]
+            ),
+        )
+
+        self.assertEqual(run.id, run_json["id"])
+        self.assertEqual({"uuid": str(flow.uuid), "name": "Colors"}, run_json["flow"])
+        self.assertEqual({"uuid": str(self.contact.uuid), "name": "Ben Haggerty"}, run_json["contact"])
+        self.assertTrue(run_json["responded"])
+
+        self.assertEqual(
+            [
+                {"node": matchers.UUID4String(), "time": matchers.ISODate()},
+                {"node": matchers.UUID4String(), "time": matchers.ISODate()},
+                {"node": matchers.UUID4String(), "time": matchers.ISODate()},
+                {"node": matchers.UUID4String(), "time": matchers.ISODate()},
+            ],
+            run_json["path"],
+        )
+
+        self.assertEqual(
+            {
+                "color": {
+                    "category": "Other",
+                    "name": "Color",
+                    "node": matchers.UUID4String(),
+                    "time": matchers.ISODate(),
+                    "value": "green",
+                    "input": "green",
+                }
+            },
+            run_json["values"],
+        )
+
+        self.assertEqual(run.created_on.isoformat(), run_json["created_on"])
+        self.assertEqual(run.modified_on.isoformat(), run_json["modified_on"])
+        self.assertIsNone(run_json["exit_type"])
+        self.assertIsNone(run_json["exited_on"])
+
+    def _check_deletion(self, by_archiver: bool, expected: dict, session_completed=True):
+        """
+        Runs our favorites flow, then deletes the run and asserts our final state
+        """
+
+        flow = self.get_flow("favorites_v13")
+        flow_nodes = flow.get_definition()["nodes"]
+        color_prompt = flow_nodes[0]
+        color_split = flow_nodes[2]
+        beer_prompt = flow_nodes[3]
+        beer_split = flow_nodes[5]
+        name_prompt = flow_nodes[6]
+        name_split = flow_nodes[7]
+        end_prompt = flow_nodes[8]
+
+        start = FlowStart.create(flow, self.admin, contacts=[self.contact])
+        if session_completed:
+            (
+                MockSessionWriter(self.contact, flow, start)
+                .visit(color_prompt)
+                .send_msg("What is your favorite color?", self.channel)
+                .visit(color_split)
+                .wait()
+                .resume(msg=self.create_incoming_msg(self.contact, "blue"))
+                .set_result("Color", "blue", "Blue", "blue")
+                .visit(beer_prompt, exit_index=2)
+                .send_msg("Good choice, I like Blue too! What is your favorite beer?")
+                .visit(beer_split)
+                .wait()
+                .resume(msg=self.create_incoming_msg(self.contact, "primus"))
+                .set_result("Beer", "primus", "Primus", "primus")
+                .visit(name_prompt, exit_index=2)
+                .send_msg("Mmmmm... delicious Turbo King. Lastly, what is your name?")
+                .visit(name_split)
+                .wait()
+                .resume(msg=self.create_incoming_msg(self.contact, "Ryan Lewis"))
+                .visit(end_prompt)
+                .complete()
+                .save()
+            )
+        else:
+            (
+                MockSessionWriter(self.contact, flow, start)
+                .visit(color_prompt)
+                .send_msg("What is your favorite color?", self.channel)
+                .visit(color_split)
+                .wait()
+                .resume(msg=self.create_incoming_msg(self.contact, "blue"))
+                .set_result("Color", "blue", "Blue", "blue")
+                .visit(beer_prompt, exit_index=2)
+                .send_msg("Good choice, I like Blue too! What is your favorite beer?")
+                .visit(beer_split)
+                .wait()
+                .resume(msg=self.create_incoming_msg(self.contact, "primus"))
+                .set_result("Beer", "primus", "Primus", "primus")
+                .visit(name_prompt, exit_index=2)
+                .send_msg("Mmmmm... delicious Turbo King. Lastly, what is your name?")
+                .visit(name_split)
+                .wait()
+                .save()
+            )
+
+        run = FlowRun.objects.get(contact=self.contact)
+        if by_archiver:
+            super(FlowRun, run).delete()  # delete_from_counts left unset
+        else:
+            run.delete()  # delete_from_counts updated to true
+
+        cat_counts = {c["key"]: c for c in flow.get_category_counts()}
+
+        self.assertEqual(2, len(cat_counts))
+        self.assertEqual(expected["red_count"], cat_counts["color"]["categories"][0]["count"])
+        self.assertEqual(expected["primus_count"], cat_counts["beer"]["categories"][0]["count"])
+
+        self.assertEqual(expected["start_count"], FlowStartCount.get_count(start))
+        self.assertEqual(expected["run_count"], flow.get_run_stats())
+
+        self.assertFalse(FlowRun.objects.filter(id=run.id).exists())
+
+    @patch("temba.mailroom.queue_interrupt")
+    def test_delete_by_user_with_complete_session(self, mock_queue_interrupt):
+        self._check_deletion(
+            by_archiver=False,
+            expected={
+                "red_count": 0,
+                "primus_count": 0,
+                "start_count": 1,  # unchanged
+                "run_count": {
+                    "total": 0,
+                    "status": {
+                        "active": 0,
+                        "waiting": 0,
+                        "completed": 0,
+                        "expired": 0,
+                        "interrupted": 0,
+                        "failed": 0,
+                    },
+                    "completion": 0,
+                },
+            },
+        )
+        self.assertFalse(mock_queue_interrupt.called)
+
+    @patch("temba.mailroom.queue_interrupt")
+    def test_delete_by_user_without_complete_session(self, mock_queue_interrupt):
+        self._check_deletion(
+            by_archiver=False,
+            expected={
+                "red_count": 0,
+                "primus_count": 0,
+                "start_count": 1,  # unchanged
+                "run_count": {
+                    "total": 0,
+                    "status": {
+                        "active": 0,
+                        "waiting": 0,
+                        "completed": 0,
+                        "expired": 0,
+                        "interrupted": 0,
+                        "failed": 0,
+                    },
+                    "completion": 0,
+                },
+            },
+            session_completed=False,
+        )
+        mock_queue_interrupt.assert_called_once()
+
+    @patch("temba.mailroom.queue_interrupt")
+    def test_delete_by_archiver(self, mock_queue_interrupt):
+        self._check_deletion(
+            by_archiver=True,
+            expected={
+                "red_count": 1,
+                "primus_count": 1,
+                "start_count": 1,  # unchanged
+                "run_count": {  # unchanged
+                    "total": 1,
+                    "status": {
+                        "active": 0,
+                        "waiting": 0,
+                        "completed": 1,
+                        "expired": 0,
+                        "interrupted": 0,
+                        "failed": 0,
+                    },
+                    "completion": 100,
+                },
+            },
+        )
+        self.assertFalse(mock_queue_interrupt.called)
+
+    def test_big_ids(self):
+        # create a session and run with big ids
+        session = FlowSession.objects.create(
+            id=3_000_000_000,
+            uuid=uuid4(),
+            org=self.org,
+            contact=self.contact,
+            status=FlowSession.STATUS_WAITING,
+            output_url="http://sessions.com/123.json",
+            created_on=timezone.now(),
+            wait_started_on=timezone.now(),
+            wait_expires_on=timezone.now() + timedelta(days=7),
+            wait_resume_on_expire=False,
+        )
+        FlowRun.objects.create(
+            id=4_000_000_000,
+            uuid=uuid4(),
+            org=self.org,
+            session=session,
flow=self.create_flow("Test"), + contact=self.contact, + status=FlowRun.STATUS_WAITING, + created_on=timezone.now(), + modified_on=timezone.now(), + path=[ + { + "uuid": "b5c3421c-3bbb-4dc7-9bda-683456588a6d", + "node_uuid": "857a1498-3d5f-40f5-8185-2ce596ce2677", + "arrived_on": "2021-12-20T08:47:30.123Z", + "exit_uuid": "6fc14d2c-3b4d-49c7-b342-4b2b2ebf7678", + }, + { + "uuid": "4a254612-8437-47e1-b7bd-feb97ee60bf6", + "node_uuid": "59d992c6-c491-473d-a7e9-4f431d705c01", + "arrived_on": "2021-12-20T08:47:30.234Z", + "exit_uuid": None, + }, + ], + current_node_uuid="59d992c6-c491-473d-a7e9-4f431d705c01", + ) diff --git a/temba/flows/tests/test_runcrudl.py b/temba/flows/tests/test_runcrudl.py new file mode 100644 index 00000000000..6e873d085f1 --- /dev/null +++ b/temba/flows/tests/test_runcrudl.py @@ -0,0 +1,40 @@ +from django.urls import reverse +from django.utils import timezone + +from temba.flows.models import FlowRun +from temba.tests import CRUDLTestMixin, TembaTest +from temba.utils.uuid import uuid4 + + +class FlowRunCRUDLTest(TembaTest, CRUDLTestMixin): + def test_delete(self): + contact = self.create_contact("Ann", phone="+1234567890") + flow = self.create_flow("Test") + + run1 = FlowRun.objects.create( + uuid=uuid4(), + org=self.org, + flow=flow, + contact=contact, + status=FlowRun.STATUS_COMPLETED, + created_on=timezone.now(), + modified_on=timezone.now(), + exited_on=timezone.now(), + ) + run2 = FlowRun.objects.create( + uuid=uuid4(), + org=self.org, + flow=flow, + contact=contact, + status=FlowRun.STATUS_COMPLETED, + created_on=timezone.now(), + modified_on=timezone.now(), + exited_on=timezone.now(), + ) + + delete_url = reverse("flows.flowrun_delete", args=[run1.id]) + + self.assertDeleteSubmit(delete_url, self.admin, object_deleted=run1, success_status=200) + + self.assertFalse(FlowRun.objects.filter(id=run1.id).exists()) + self.assertTrue(FlowRun.objects.filter(id=run2.id).exists()) # unchanged diff --git a/temba/flows/tests/test_session.py b/temba/flows/tests/test_session.py new file mode 100644 index 00000000000..927ea572b21 --- /dev/null +++ b/temba/flows/tests/test_session.py @@ -0,0 +1,141 @@ +from datetime import datetime, timedelta, timezone as tzone + +from django.utils import timezone + +from temba.flows.models import FlowRun, FlowSession +from temba.flows.tasks import interrupt_flow_sessions, trim_flow_sessions +from temba.tests import TembaTest, matchers, mock_mailroom +from temba.utils.uuid import uuid4 + + +class FlowSessionTest(TembaTest): + @mock_mailroom + def test_interrupt(self, mr_mocks): + contact = self.create_contact("Ben Haggerty", phone="+250788123123") + + def create_session(org, created_on: datetime): + return FlowSession.objects.create( + uuid=uuid4(), + org=org, + contact=contact, + created_on=created_on, + output_url="http://sessions.com/123.json", + status=FlowSession.STATUS_WAITING, + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + + create_session(self.org, timezone.now() - timedelta(days=88)) + session2 = create_session(self.org, timezone.now() - timedelta(days=90)) + session3 = create_session(self.org, timezone.now() - timedelta(days=91)) + session4 = create_session(self.org2, timezone.now() - timedelta(days=92)) + + interrupt_flow_sessions() + + self.assertEqual( + [ + { + "type": "interrupt_sessions", + "org_id": self.org.id, + "queued_on": matchers.Datetime(), + "task": {"session_ids": [session2.id, session3.id]}, + }, + { + "type": "interrupt_sessions", + 
"org_id": self.org2.id, + "queued_on": matchers.Datetime(), + "task": {"session_ids": [session4.id]}, + }, + ], + mr_mocks.queued_batch_tasks, + ) + + def test_trim(self): + contact = self.create_contact("Ben Haggerty", phone="+250788123123") + flow = self.create_flow("Test") + + # create some runs that have sessions + session1 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=contact, + output_url="http://sessions.com/123.json", + status=FlowSession.STATUS_WAITING, + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + session2 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=contact, + output_url="http://sessions.com/234.json", + status=FlowSession.STATUS_WAITING, + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + session3 = FlowSession.objects.create( + uuid=uuid4(), + org=self.org, + contact=contact, + output_url="http://sessions.com/345.json", + status=FlowSession.STATUS_WAITING, + wait_started_on=timezone.now(), + wait_expires_on=timezone.now() + timedelta(days=7), + wait_resume_on_expire=False, + ) + run1 = FlowRun.objects.create( + org=self.org, flow=flow, contact=contact, session=session1, status=FlowRun.STATUS_WAITING + ) + run2 = FlowRun.objects.create( + org=self.org, flow=flow, contact=contact, session=session2, status=FlowRun.STATUS_WAITING + ) + run3 = FlowRun.objects.create( + org=self.org, flow=flow, contact=contact, session=session3, status=FlowRun.STATUS_WAITING + ) + + # create an IVR call with session + call = self.create_incoming_call(flow, contact) + run4 = call.session.runs.get() + + self.assertIsNotNone(run1.session) + self.assertIsNotNone(run2.session) + self.assertIsNotNone(run3.session) + self.assertIsNotNone(run4.session) + + # end run1 and run4's sessions in the past + run1.status = FlowRun.STATUS_COMPLETED + run1.exited_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc) + run1.save(update_fields=("status", "exited_on")) + run1.session.status = FlowSession.STATUS_COMPLETED + run1.session.ended_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc) + run1.session.save(update_fields=("status", "ended_on")) + + run4.status = FlowRun.STATUS_INTERRUPTED + run4.exited_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc) + run4.save(update_fields=("status", "exited_on")) + run4.session.status = FlowSession.STATUS_INTERRUPTED + run4.session.ended_on = datetime(2015, 9, 15, 0, 0, 0, 0, tzone.utc) + run4.session.save(update_fields=("status", "ended_on")) + + # end run2's session now + run2.status = FlowRun.STATUS_EXPIRED + run2.exited_on = timezone.now() + run2.save(update_fields=("status", "exited_on")) + run4.session.status = FlowSession.STATUS_EXPIRED + run2.session.ended_on = timezone.now() + run2.session.save(update_fields=("status", "ended_on")) + + trim_flow_sessions() + + run1, run2, run3, run4 = FlowRun.objects.order_by("id") + + self.assertIsNone(run1.session) + self.assertIsNotNone(run2.session) # ended too recently to be deleted + self.assertIsNotNone(run3.session) # never ended + self.assertIsNone(run4.session) + + # only sessions for run2 and run3 are left + self.assertEqual(FlowSession.objects.count(), 2) diff --git a/temba/flows/tests/test_sessioncrudl.py b/temba/flows/tests/test_sessioncrudl.py new file mode 100644 index 00000000000..231e99eef94 --- /dev/null +++ b/temba/flows/tests/test_sessioncrudl.py @@ -0,0 +1,49 @@ +import io + +from django.urls import reverse + +from 
temba.flows.models import FlowSession +from temba.tests import TembaTest +from temba.tests.engine import MockSessionWriter +from temba.utils import json, s3 + + +class FlowSessionCRUDLTest(TembaTest): + def test_session_json(self): + contact = self.create_contact("Bob", phone="+1234567890") + flow = self.get_flow("color_v13") + + session = MockSessionWriter(contact, flow).wait().save().session + + # normal users can't see session json + json_url = reverse("flows.flowsession_json", args=[session.uuid]) + response = self.client.get(json_url) + self.assertLoginRedirect(response) + + self.login(self.admin) + response = self.client.get(json_url) + self.assertLoginRedirect(response) + + # but logged in as a CS rep we can + self.login(self.customer_support, choose_org=self.org) + + response = self.client.get(json_url) + self.assertEqual(200, response.status_code) + + response_json = json.loads(response.content) + self.assertEqual("Nyaruka", response_json["_metadata"]["org"]) + self.assertEqual(session.uuid, response_json["uuid"]) + + # now try with an s3 session + s3.client().put_object( + Bucket="test-sessions", Key="c/session.json", Body=io.BytesIO(json.dumps(session.output).encode()) + ) + FlowSession.objects.filter(id=session.id).update( + output_url="http://minio:9000/test-sessions/c/session.json", output=None + ) + + # fetch our contact history + response = self.client.get(json_url) + self.assertEqual(200, response.status_code) + self.assertEqual("Nyaruka", response_json["_metadata"]["org"]) + self.assertEqual(session.uuid, response_json["uuid"]) diff --git a/temba/flows/tests/test_simulation.py b/temba/flows/tests/test_simulation.py new file mode 100644 index 00000000000..d7f250005bf --- /dev/null +++ b/temba/flows/tests/test_simulation.py @@ -0,0 +1,136 @@ +import decimal +from unittest.mock import patch + +from django.test.utils import override_settings +from django.urls import reverse +from django.utils import timezone + +from temba.tests import MockJsonResponse, TembaTest +from temba.utils import json +from temba.utils.uuid import uuid4 + + +class SimulationTest(TembaTest): + def add_message(self, payload, text): + """ + Add a message to the payload for the flow server using the default contact + """ + payload["resume"] = { + "type": "msg", + "resumed_on": timezone.now().isoformat(), + "msg": {"text": text, "uuid": str(uuid4()), "urn": "tel:+12065551212"}, + } + + def get_replies(self, response): + """ + Gets any replies in a response from the flow server as a list of strings + """ + replies = [] + for event in response.get("events", []): + if event["type"] == "broadcast_created": + replies.append(event["translations"][event["base_language"]]["text"]) + elif event["type"] == "msg_created": + replies.append(event["msg"]["text"]) + return replies + + def test_simulation_ivr(self): + self.login(self.admin) + flow = self.get_flow("ivr") + + # create our payload + payload = {"version": 2, "trigger": {}, "flow": {}} + url = reverse("flows.flow_simulate", args=[flow.id]) + + with override_settings(MAILROOM_AUTH_TOKEN="sesame", MAILROOM_URL="https://mailroom.temba.io"): + with patch("requests.post") as mock_post: + mock_post.return_value = MockJsonResponse(200, {"session": {}}) + response = self.client.post(url, payload, content_type="application/json") + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"session": {}}) + + # since this is an IVR flow, the session trigger will have a connection + self.assertEqual( + { + "call": { + "channel": {"uuid": 
"440099cf-200c-4d45-a8e7-4a564f4a0e8b", "name": "Test Channel"}, + "urn": "tel:+12065551212", + }, + "environment": { + "date_format": "DD-MM-YYYY", + "time_format": "tt:mm", + "timezone": "Africa/Kigali", + "allowed_languages": ["eng", "kin"], + "default_country": "RW", + "redaction_policy": "none", + "input_collation": "default", + }, + "user": {"email": "admin@textit.com", "name": "Andy"}, + }, + json.loads(mock_post.call_args[1]["data"])["trigger"], + ) + + def test_simulation(self): + self.login(self.admin) + flow = self.get_flow("favorites") + + # create our payload + payload = dict(version=2, trigger={}, flow={}) + + url = reverse("flows.flow_simulate", args=[flow.pk]) + + with override_settings(MAILROOM_AUTH_TOKEN="sesame", MAILROOM_URL="https://mailroom.temba.io"): + with patch("requests.post") as mock_post: + mock_post.return_value = MockJsonResponse(400, {"session": {}}) + response = self.client.post(url, json.dumps(payload), content_type="application/json") + self.assertEqual(500, response.status_code) + + # start a flow + with patch("requests.post") as mock_post: + mock_post.return_value = MockJsonResponse(200, {"session": {}}) + response = self.client.post(url, json.dumps(payload), content_type="application/json") + self.assertEqual(200, response.status_code) + self.assertEqual({}, response.json()["session"]) + + actual_url = mock_post.call_args_list[0][0][0] + actual_payload = json.loads(mock_post.call_args_list[0][1]["data"]) + actual_headers = mock_post.call_args_list[0][1]["headers"] + + self.assertEqual(actual_url, "https://mailroom.temba.io/mr/sim/start") + self.assertEqual(actual_payload["org_id"], flow.org_id) + self.assertEqual(actual_payload["trigger"]["environment"]["date_format"], "DD-MM-YYYY") + self.assertEqual(len(actual_payload["assets"]["channels"]), 1) # fake channel + self.assertEqual(len(actual_payload["flows"]), 1) + self.assertEqual(actual_headers["Authorization"], "Token sesame") + self.assertEqual(actual_headers["Content-Type"], "application/json") + + # try a resume + payload = { + "version": 2, + "session": {"contact": {"fields": {"age": decimal.Decimal("39")}}}, + "resume": {}, + "flow": {}, + } + + with patch("requests.post") as mock_post: + mock_post.return_value = MockJsonResponse(400, {"session": {}}) + response = self.client.post(url, json.dumps(payload), content_type="application/json") + self.assertEqual(500, response.status_code) + + with patch("requests.post") as mock_post: + mock_post.return_value = MockJsonResponse(200, {"session": {}}) + response = self.client.post(url, json.dumps(payload), content_type="application/json") + self.assertEqual(200, response.status_code) + self.assertEqual({}, response.json()["session"]) + + actual_url = mock_post.call_args_list[0][0][0] + actual_payload = json.loads(mock_post.call_args_list[0][1]["data"]) + actual_headers = mock_post.call_args_list[0][1]["headers"] + + self.assertEqual(actual_url, "https://mailroom.temba.io/mr/sim/resume") + self.assertEqual(actual_payload["org_id"], flow.org_id) + self.assertEqual(actual_payload["resume"]["environment"]["date_format"], "DD-MM-YYYY") + self.assertEqual(len(actual_payload["assets"]["channels"]), 1) # fake channel + self.assertEqual(len(actual_payload["flows"]), 1) + self.assertEqual(actual_headers["Authorization"], "Token sesame") + self.assertEqual(actual_headers["Content-Type"], "application/json") diff --git a/temba/flows/tests/test_start.py b/temba/flows/tests/test_start.py new file mode 100644 index 00000000000..2a17dc63601 --- /dev/null +++ 
b/temba/flows/tests/test_start.py @@ -0,0 +1,39 @@ +from temba import mailroom +from temba.flows.models import FlowStart +from temba.tests import TembaTest, mock_mailroom + + +class FlowStartTest(TembaTest): + def test_model(self): + flow = self.create_flow("Test Flow") + contact = self.create_contact("Bob", phone="+1234567890") + start = FlowStart.create(flow, self.admin, contacts=[contact]) + + self.assertEqual(f'', repr(start)) + self.assertTrue(FlowStart.has_unfinished(self.org)) + + start.interrupt(self.editor) + + start.refresh_from_db() + self.assertEqual(FlowStart.STATUS_INTERRUPTED, start.status) + self.assertEqual(self.editor, start.modified_by) + self.assertIsNotNone(start.modified_on) + self.assertFalse(FlowStart.has_unfinished(self.org)) + + @mock_mailroom + def test_preview(self, mr_mocks): + flow = self.create_flow("Test") + contact1 = self.create_contact("Ann", phone="+1234567111") + contact2 = self.create_contact("Bob", phone="+1234567222") + doctors = self.create_group("Doctors", contacts=[contact1, contact2]) + + mr_mocks.flow_start_preview(query='group = "Doctors" AND status = "active"', total=100) + + query, total = FlowStart.preview( + flow, + include=mailroom.Inclusions(group_uuids=[str(doctors.uuid)]), + exclude=mailroom.Exclusions(non_active=True), + ) + + self.assertEqual('group = "Doctors" AND status = "active"', query) + self.assertEqual(100, total) diff --git a/temba/flows/tests/test_startcrudl.py b/temba/flows/tests/test_startcrudl.py new file mode 100644 index 00000000000..a6bd01fd4ed --- /dev/null +++ b/temba/flows/tests/test_startcrudl.py @@ -0,0 +1,69 @@ +from django.urls import reverse + +from temba.flows.models import FlowStart, FlowStartCount +from temba.tests import CRUDLTestMixin, TembaTest + + +class FlowStartCRUDLTest(TembaTest, CRUDLTestMixin): + def test_list(self): + list_url = reverse("flows.flowstart_list") + + flow1 = self.create_flow("Test Flow 1") + flow2 = self.create_flow("Test 2") + + contact = self.create_contact("Bob", phone="+1234567890") + group = self.create_group("Testers", contacts=[contact]) + start1 = FlowStart.create(flow1, self.admin, contacts=[contact]) + start2 = FlowStart.create( + flow1, self.admin, query="name ~ Bob", start_type="A", exclusions={"started_previously": True} + ) + start3 = FlowStart.create(flow2, self.admin, groups=[group], start_type="Z", exclusions={"in_a_flow": True}) + + flow2.release(self.admin) + + FlowStartCount.objects.create(start=start3, count=1000) + FlowStartCount.objects.create(start=start3, count=234) + + other_org_flow = self.create_flow("Test", org=self.org2) + FlowStart.create(other_org_flow, self.admin2) + + self.assertRequestDisallowed(list_url, [None, self.agent]) + response = self.assertListFetch( + list_url, [self.user, self.editor, self.admin], context_objects=[start3, start2, start1] + ) + + self.assertContains(response, "Test Flow 1") + self.assertNotContains(response, "Test Flow 2") + self.assertContains(response, "A deleted flow") + self.assertContains(response, "was started by admin@textit.com") + self.assertContains(response, "was started by an API call") + self.assertContains(response, "was started by Zapier") + self.assertContains(response, "Not in a flow") + + response = self.assertListFetch(list_url + "?type=manual", [self.admin], context_objects=[start1]) + self.assertTrue(response.context["filtered"]) + self.assertEqual(response.context["url_params"], "?type=manual&") + + def test_status(self): + flow = self.create_flow("Test Flow 1") + contact = self.create_contact("Bob", 
phone="+1234567890") + start = FlowStart.create(flow, self.admin, contacts=[contact]) + + status_url = f"{reverse('flows.flowstart_status')}?id={start.id}&status=P" + self.assertRequestDisallowed(status_url, [self.agent]) + response = self.assertReadFetch(status_url, [self.editor, self.admin]) + + # status returns json + self.assertEqual("Pending", response.json()["results"][0]["status"]) + + def test_interrupt(self): + flow = self.create_flow("Test Flow 1") + contact = self.create_contact("Bob", phone="+1234567890") + start = FlowStart.create(flow, self.admin, contacts=[contact]) + + interrupt_url = reverse("flows.flowstart_interrupt", args=[start.id]) + self.assertRequestDisallowed(interrupt_url, [None, self.user, self.agent]) + self.requestView(interrupt_url, self.admin, post_data={}) + + start.refresh_from_db() + self.assertEqual(FlowStart.STATUS_INTERRUPTED, start.status) diff --git a/temba/flows/urls.py b/temba/flows/urls.py index be3465e876e..42cfd4c98f4 100644 --- a/temba/flows/urls.py +++ b/temba/flows/urls.py @@ -1,12 +1,7 @@ -from django.urls import re_path - -from .views import FlowCRUDL, FlowLabelCRUDL, FlowRunCRUDL, FlowSessionCRUDL, FlowStartCRUDL, PartialTemplate +from .views import FlowCRUDL, FlowLabelCRUDL, FlowRunCRUDL, FlowSessionCRUDL, FlowStartCRUDL urlpatterns = FlowCRUDL().as_urlpatterns() urlpatterns += FlowLabelCRUDL().as_urlpatterns() urlpatterns += FlowRunCRUDL().as_urlpatterns() urlpatterns += FlowSessionCRUDL().as_urlpatterns() urlpatterns += FlowStartCRUDL().as_urlpatterns() -urlpatterns += [ - re_path(r"^partials/(?P