Filter discarded conversations via api (#1689)
issue: #1648
beastoin authored Jan 14, 2025
2 parents 3630e07 + a14f5d0 commit 49aeb6e
Showing 7 changed files with 60 additions and 50 deletions.
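
For orientation, a minimal client-side sketch of the new include_discarded flag on the two affected endpoints. The base URL and auth header below are hypothetical placeholders; the real app resolves the host from Env.apiBaseUrl and authenticates through its own auth layer.

```python
import requests

BASE_URL = "https://api.example.com/"          # placeholder for Env.apiBaseUrl
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder auth header

# List conversations, excluding discarded ones (new query parameter, defaults to true).
listing = requests.get(
    f"{BASE_URL}v1/memories",
    params={"limit": 50, "offset": 0, "include_discarded": "false"},
    headers=HEADERS,
)
print(listing.status_code)

# Search conversations, excluding discarded ones (new request-body field, defaults to true).
search = requests.post(
    f"{BASE_URL}v1/memories/search",
    json={"query": "standup notes", "page": 1, "per_page": 10, "include_discarded": False},
    headers=HEADERS,
)
print(search.status_code)
```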
18 changes: 14 additions & 4 deletions app/lib/backend/http/api/conversations.dart
@@ -37,10 +37,13 @@ Future<CreateConversationResponse?> processInProgressConversation() async {
}

Future<List<ServerConversation>> getConversations(
{int limit = 50, int offset = 0, List<ConversationStatus> statuses = const []}) async {
{int limit = 50,
int offset = 0,
List<ConversationStatus> statuses = const [],
bool includeDiscarded = true}) async {
var response = await makeApiCall(
url:
'${Env.apiBaseUrl}v1/memories?limit=$limit&offset=$offset&statuses=${statuses.map((val) => val.toString().split(".").last).join(",")}',
'${Env.apiBaseUrl}v1/memories?include_discarded=$includeDiscarded&limit=$limit&offset=$offset&statuses=${statuses.map((val) => val.toString().split(".").last).join(",")}',
headers: {},
method: 'GET',
body: '');
@@ -353,12 +356,19 @@ Future<SyncLocalFilesResponse> syncLocalFiles(List<File> files) async {
}
}

Future<(List<ServerConversation>, int, int)> searchConversationsServer(String query, [int? page, int? limit]) async {
Future<(List<ServerConversation>, int, int)> searchConversationsServer(
String query, {
int? page,
int? limit,
bool includeDiscarded = true,
}) async {
debugPrint(Env.apiBaseUrl);
var response = await makeApiCall(
url: '${Env.apiBaseUrl}v1/memories/search',
headers: {},
method: 'POST',
body: jsonEncode({'query': query, 'page': page ?? 1, 'per_page': limit ?? 10}),
body:
jsonEncode({'query': query, 'page': page ?? 1, 'per_page': limit ?? 10, 'include_discarded': includeDiscarded}),
);
if (response == null) return (<ServerConversation>[], 0, 0);
if (response.statusCode == 200) {
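
The Dart change above folds include_discarded into the existing interpolated query string. A rough Python equivalent of the URL it now builds (the status names here are illustrative, not taken from the enum definition):

```python
from urllib.parse import urlencode

# Mirror of the interpolated Dart URL: include_discarded, limit, offset, then
# comma-joined enum names for statuses. Status names are illustrative.
params = {
    "include_discarded": "false",
    "limit": 50,
    "offset": 0,
    "statuses": ",".join(["processing", "completed"]),
}
print("v1/memories?" + urlencode(params, safe=","))
# v1/memories?include_discarded=false&limit=50&offset=0&statuses=processing,completed
```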
10 changes: 5 additions & 5 deletions app/lib/gen/assets.gen.dart

Some generated files are not rendered by default.

41 changes: 15 additions & 26 deletions app/lib/providers/conversation_provider.dart
@@ -82,13 +82,9 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
return;
}

if (query == previousQuery) {
return;
}

setIsFetchingConversations(true);
previousQuery = query;
var (convos, current, total) = await searchConversationsServer(query);
var (convos, current, total) = await searchConversationsServer(query, includeDiscarded: showDiscardedConversations);
convos.sort((a, b) => b.createdAt.compareTo(a.createdAt));
searchedConversations = convos;
currentSearchPage = current;
@@ -106,7 +102,8 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
setLoadingConversations(true);
var (newConvos, current, total) = await searchConversationsServer(
previousQuery,
currentSearchPage + 1,
page: currentSearchPage + 1,
includeDiscarded: showDiscardedConversations,
);
searchedConversations.addAll(newConvos);
searchedConversations.sort((a, b) => b.createdAt.compareTo(a.createdAt));
@@ -153,9 +150,9 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
showDiscardedConversations = !showDiscardedConversations;

if (previousQuery.isNotEmpty) {
groupSearchConvosByDate();
searchConversations(previousQuery);
} else {
groupConversationsByDate();
fetchConversations();
}

MixpanelManager().showDiscardedMemoriesToggled(showDiscardedConversations);
@@ -166,14 +163,12 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
notifyListeners();
}

Future getInitialConversations() async {
// reset search
Future fetchConversations() async {
previousQuery = "";
currentSearchPage = 0;
totalSearchPages = 0;
searchedConversations = [];

// fetch convos
conversations = await getConversationsFromServer();

processingConversations = conversations.where((m) => m.status == ConversationStatus.processing).toList();
@@ -188,23 +183,16 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
searchedConversations = conversations;
}
_groupConversationsByDateWithoutNotify();

notifyListeners();
}

Future getInitialConversations() async {
await fetchConversations();
}

List<ServerConversation> _filterOutConvos(List<ServerConversation> convos) {
var havingFilters = true;
if (showDiscardedConversations) {
havingFilters = false;
}
if (!havingFilters) {
return convos;
}
return convos.where((convo) {
if (!showDiscardedConversations && (convo.discarded && !convo.isNew)) {
return false;
}
return true;
}).toList();
return convos;
}

void _groupSearchConvosByDateWithoutNotify() {
@@ -251,7 +239,7 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener

Future getConversationsFromServer() async {
setLoadingConversations(true);
var mem = await getConversations();
var mem = await getConversations(includeDiscarded: showDiscardedConversations);
conversations = mem;
conversations.sort((a, b) => b.createdAt.compareTo(a.createdAt));
setLoadingConversations(false);
@@ -281,7 +269,8 @@ class ConversationProvider extends ChangeNotifier implements IWalServiceListener
if (conversations.length % 50 != 0) return;
if (isLoadingConversations) return;
setLoadingConversations(true);
var newConversations = await getConversations(offset: conversations.length);
var newConversations =
await getConversations(offset: conversations.length, includeDiscarded: showDiscardedConversations);
conversations.addAll(newConversations);
conversations.sort((a, b) => b.createdAt.compareTo(a.createdAt));
groupConversationsByDate();
1 change: 1 addition & 0 deletions backend/models/memory.py
@@ -274,3 +274,4 @@ class SearchRequest(BaseModel):
query: str
page: Optional[int] = 1
per_page: Optional[int] = 10
include_discarded: Optional[bool] = True
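
A quick sketch of the extended model's behavior under pydantic, using only the fields shown above; because the new field defaults to True, callers that omit it keep the previous include-everything behavior:

```python
from typing import Optional
from pydantic import BaseModel

class SearchRequest(BaseModel):
    query: str
    page: Optional[int] = 1
    per_page: Optional[int] = 10
    include_discarded: Optional[bool] = True

print(SearchRequest(query="standup").include_discarded)                           # True (default)
print(SearchRequest(query="standup", include_discarded=False).include_discarded)  # False
```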
26 changes: 13 additions & 13 deletions backend/routers/memories.py
@@ -69,9 +69,9 @@ def reprocess_memory(


@router.get('/v1/memories', response_model=List[Memory], tags=['memories'])
def get_memories(limit: int = 100, offset: int = 0, statuses: str = "", uid: str = Depends(auth.get_current_user_uid)):
def get_memories(limit: int = 100, offset: int = 0, statuses: str = "", include_discarded: bool = True, uid: str = Depends(auth.get_current_user_uid)):
print('get_memories', uid, limit, offset, statuses)
return memories_db.get_memories(uid, limit, offset, include_discarded=True,
return memories_db.get_memories(uid, limit, offset, include_discarded=include_discarded,
statuses=statuses.split(",") if len(statuses) > 0 else [])


@@ -202,16 +202,16 @@ def set_assignee_memory_segment(
raise HTTPException(status_code=400, detail="Invalid assign type")

memories_db.update_memory_segments(uid, memory_id, [segment.dict() for segment in memory.transcript_segments])
segment_words = len(memory.transcript_segments[segment_idx].text.split(' '))

# TODO: can do this async
if use_for_speech_training and not is_unassigning and segment_words > 5: # some decent sample at least
person_id = value if assign_type == 'person_id' else None
expand_speech_profile(memory_id, uid, segment_idx, assign_type, person_id)
else:
path = f'{memory_id}_segment_{segment_idx}.wav'
delete_additional_profile_audio(uid, path)
delete_speech_sample_for_people(uid, path)
# thinh's note: disabled for now
# segment_words = len(memory.transcript_segments[segment_idx].text.split(' '))
# # TODO: can do this async
# if use_for_speech_training and not is_unassigning and segment_words > 5: # some decent sample at least
# person_id = value if assign_type == 'person_id' else None
# expand_speech_profile(memory_id, uid, segment_idx, assign_type, person_id)
# else:
# path = f'{memory_id}_segment_{segment_idx}.wav'
# delete_additional_profile_audio(uid, path)
# delete_speech_sample_for_people(uid, path)

return memory

@@ -343,4 +343,4 @@ def get_public_memories(offset: int = 0, limit: int = 1000):
@router.post("/v1/memories/search", response_model=dict, tags=['memories'])
def search_memories_endpoint(search_request: SearchRequest, uid: str = Depends(auth.get_current_user_uid)):
return search_memories(query=search_request.query, page=search_request.page,
per_page=search_request.per_page, uid=uid)
per_page=search_request.per_page, uid=uid, include_discarded=search_request.include_discarded)
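
A self-contained sketch (not the project's actual router, and without auth or a database) of how FastAPI parses the new boolean query parameter; omitting it keeps the old behavior of returning discarded memories:

```python
from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()

# Stripped-down stand-in for the real get_memories route: it just echoes how
# the query parameters were parsed.
@app.get("/v1/memories")
def get_memories(limit: int = 100, offset: int = 0, include_discarded: bool = True):
    return {"limit": limit, "offset": offset, "include_discarded": include_discarded}

client = TestClient(app)
print(client.get("/v1/memories").json())                          # include_discarded: true (default)
print(client.get("/v1/memories?include_discarded=false").json())  # include_discarded: false
```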
6 changes: 5 additions & 1 deletion backend/typesense/memories.schema
@@ -25,8 +25,12 @@
{
"name": "userId",
"type": "string"
},
{
"name": "discarded",
"type": "bool"
}
],
"default_sorting_field": "created_at",
"enable_nested_fields": true
}
}
8 changes: 7 additions & 1 deletion backend/utils/memories/search.py
@@ -21,12 +21,18 @@ def search_memories(
query: str,
page: int = 1,
per_page: int = 10,
include_discarded: bool = True,
) -> Dict:
try:

filter_by = f'userId:={uid} && deleted:=false'
if not include_discarded:
filter_by = filter_by + ' && discarded:=false'

search_parameters = {
'q': query,
'query_by': 'structured, transcript_segments',
'filter_by': 'userId := ' + uid,
'filter_by': filter_by,
'sort_by': 'created_at:desc',
'per_page': per_page,
'page': page,
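
As a concrete illustration of the filter construction above, the two settings produce these Typesense filter strings (the uid value is a made-up placeholder):

```python
def build_filter_by(uid: str, include_discarded: bool) -> str:
    # Same logic as the hunk above: always scope to the user and exclude deleted
    # memories, and additionally exclude discarded ones when requested.
    filter_by = f'userId:={uid} && deleted:=false'
    if not include_discarded:
        filter_by = filter_by + ' && discarded:=false'
    return filter_by

print(build_filter_by("user_123", True))   # userId:=user_123 && deleted:=false
print(build_filter_by("user_123", False))  # userId:=user_123 && deleted:=false && discarded:=false
```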
