mirror of https://github.com/chipsenkbeil/distant
Add search support (#131)
parent
5130ee3b5f
commit
01610a3ac7
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,626 @@
|
|||||||
|
use crate::{
|
||||||
|
client::{DistantChannel, DistantChannelExt},
|
||||||
|
constants::CLIENT_SEARCHER_CAPACITY,
|
||||||
|
data::{DistantRequestData, DistantResponseData, SearchId, SearchQuery, SearchQueryMatch},
|
||||||
|
DistantMsg,
|
||||||
|
};
|
||||||
|
use distant_net::Request;
|
||||||
|
use log::*;
|
||||||
|
use std::{fmt, io};
|
||||||
|
use tokio::{sync::mpsc, task::JoinHandle};
|
||||||
|
|
||||||
|
/// Represents a searcher for files, directories, and symlinks on the filesystem
pub struct Searcher {
    // Channel to the remote server, used to send the cancellation request
    channel: DistantChannel,
    // Id of this search, assigned by the server in its SearchStarted response
    id: SearchId,
    // Query this searcher was created with
    query: SearchQuery,
    // Background task that forwards incoming search results into `rx`
    task: JoinHandle<()>,
    // Receiving end of the queue of matches produced by `task`
    rx: mpsc::Receiver<SearchQueryMatch>,
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Searcher {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_struct("Searcher")
|
||||||
|
.field("id", &self.id)
|
||||||
|
.field("query", &self.query)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Searcher {
|
||||||
|
/// Creates a searcher for some query
|
||||||
|
pub async fn search(mut channel: DistantChannel, query: SearchQuery) -> io::Result<Self> {
|
||||||
|
trace!("Searching using {query:?}",);
|
||||||
|
|
||||||
|
// Submit our run request and get back a mailbox for responses
|
||||||
|
let mut mailbox = channel
|
||||||
|
.mail(Request::new(DistantMsg::Single(
|
||||||
|
DistantRequestData::Search {
|
||||||
|
query: query.clone(),
|
||||||
|
},
|
||||||
|
)))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let (tx, rx) = mpsc::channel(CLIENT_SEARCHER_CAPACITY);
|
||||||
|
|
||||||
|
// Wait to get the confirmation of watch as either ok or error
|
||||||
|
let mut queue: Vec<SearchQueryMatch> = Vec::new();
|
||||||
|
let mut search_id = None;
|
||||||
|
while let Some(res) = mailbox.next().await {
|
||||||
|
for data in res.payload.into_vec() {
|
||||||
|
match data {
|
||||||
|
// If we get results before the started indicator, queue them up
|
||||||
|
DistantResponseData::SearchResults { matches, .. } => {
|
||||||
|
queue.extend(matches);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Once we get the started indicator, mark as ready to go
|
||||||
|
DistantResponseData::SearchStarted { id } => {
|
||||||
|
trace!("[Query {id}] Searcher has started");
|
||||||
|
search_id = Some(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we get an explicit error, convert and return it
|
||||||
|
DistantResponseData::Error(x) => return Err(io::Error::from(x)),
|
||||||
|
|
||||||
|
// Otherwise, we got something unexpected, and report as such
|
||||||
|
x => {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::Other,
|
||||||
|
format!("Unexpected response: {:?}", x),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exit if we got the confirmation
|
||||||
|
// NOTE: Doing this later because we want to make sure the entire payload is processed
|
||||||
|
// first before exiting the loop
|
||||||
|
if search_id.is_some() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let search_id = match search_id {
|
||||||
|
// Send out any of our queued changes that we got prior to the acknowledgement
|
||||||
|
Some(id) => {
|
||||||
|
trace!("[Query {id}] Forwarding {} queued matches", queue.len());
|
||||||
|
for r#match in queue.drain(..) {
|
||||||
|
if tx.send(r#match).await.is_err() {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::Other,
|
||||||
|
format!("[Query {id}] Queue search match dropped"),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
id
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we never received an acknowledgement of search before the mailbox closed,
|
||||||
|
// fail with a missing confirmation error
|
||||||
|
None => {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::Other,
|
||||||
|
"Search query missing started confirmation",
|
||||||
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Spawn a task that continues to look for search result events and the conclusion of the
|
||||||
|
// search, discarding anything else that it gets
|
||||||
|
let task = tokio::spawn({
|
||||||
|
async move {
|
||||||
|
while let Some(res) = mailbox.next().await {
|
||||||
|
let mut done = false;
|
||||||
|
|
||||||
|
for data in res.payload.into_vec() {
|
||||||
|
match data {
|
||||||
|
DistantResponseData::SearchResults { matches, .. } => {
|
||||||
|
// If we can't queue up a match anymore, we've
|
||||||
|
// been closed and therefore want to quit
|
||||||
|
if tx.is_closed() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, send over the matches
|
||||||
|
for r#match in matches {
|
||||||
|
if let Err(x) = tx.send(r#match).await {
|
||||||
|
error!(
|
||||||
|
"[Query {search_id}] Searcher failed to send match {:?}",
|
||||||
|
x.0
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Received completion indicator, so close out
|
||||||
|
DistantResponseData::SearchDone { .. } => {
|
||||||
|
trace!("[Query {search_id}] Searcher has finished");
|
||||||
|
done = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if done {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
id: search_id,
|
||||||
|
query,
|
||||||
|
channel,
|
||||||
|
task,
|
||||||
|
rx,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the query this searcher is running
|
||||||
|
pub fn query(&self) -> &SearchQuery {
|
||||||
|
&self.query
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the searcher is still actively searching
|
||||||
|
pub fn is_active(&self) -> bool {
|
||||||
|
!self.task.is_finished()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the next match detected by the searcher, or none if the searcher has concluded
|
||||||
|
pub async fn next(&mut self) -> Option<SearchQueryMatch> {
|
||||||
|
self.rx.recv().await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Cancels the search being performed by the watcher
|
||||||
|
pub async fn cancel(&mut self) -> io::Result<()> {
|
||||||
|
trace!("[Query {}] Cancelling search", self.id);
|
||||||
|
self.channel.cancel_search(self.id).await?;
|
||||||
|
|
||||||
|
// Kill our task that processes inbound matches if we have successfully stopped searching
|
||||||
|
self.task.abort();
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Tests drive a `Searcher` against an in-memory transport, playing the
    //! server side by reading requests and writing canned responses.

    use super::*;
    use crate::data::{
        SearchQueryCondition, SearchQueryMatchData, SearchQueryOptions, SearchQueryPathMatch,
        SearchQuerySubmatch, SearchQueryTarget,
    };
    use crate::DistantClient;
    use distant_net::{
        Client, FramedTransport, InmemoryTransport, IntoSplit, PlainCodec, Response,
        TypedAsyncRead, TypedAsyncWrite,
    };
    use std::{path::PathBuf, sync::Arc};
    use tokio::sync::Mutex;

    // Builds a client whose peer transport is handed back so tests can act as the server
    fn make_session() -> (
        FramedTransport<InmemoryTransport, PlainCodec>,
        DistantClient,
    ) {
        let (t1, t2) = FramedTransport::pair(100);
        let (writer, reader) = t2.into_split();
        (t1, Client::new(writer, reader).unwrap())
    }

    #[tokio::test]
    async fn searcher_should_have_query_reflect_ongoing_query() {
        let (mut transport, session) = make_session();
        let test_query = SearchQuery {
            path: PathBuf::from("/some/test/path"),
            target: SearchQueryTarget::Path,
            condition: SearchQueryCondition::Regex {
                value: String::from("."),
            },
            options: SearchQueryOptions::default(),
        };

        // Create a task for searcher as we need to handle the request and a response
        // in a separate async block
        let search_task = {
            let test_query = test_query.clone();
            tokio::spawn(async move { Searcher::search(session.clone_channel(), test_query).await })
        };

        // Wait until we get the request from the session
        let req: Request<DistantRequestData> = transport.read().await.unwrap().unwrap();

        // Send back an acknowledgement that a search was started
        transport
            .write(Response::new(
                req.id,
                DistantResponseData::SearchStarted { id: rand::random() },
            ))
            .await
            .unwrap();

        // Get the searcher and verify the query
        let searcher = search_task.await.unwrap().unwrap();
        assert_eq!(searcher.query(), &test_query);
    }

    #[tokio::test]
    async fn searcher_should_support_getting_next_match() {
        let (mut transport, session) = make_session();
        let test_query = SearchQuery {
            path: PathBuf::from("/some/test/path"),
            target: SearchQueryTarget::Path,
            condition: SearchQueryCondition::Regex {
                value: String::from("."),
            },
            options: SearchQueryOptions::default(),
        };

        // Create a task for searcher as we need to handle the request and a response
        // in a separate async block
        let search_task =
            tokio::spawn(
                async move { Searcher::search(session.clone_channel(), test_query).await },
            );

        // Wait until we get the request from the session
        let req: Request<DistantRequestData> = transport.read().await.unwrap().unwrap();

        // Send back an acknowledgement that a searcher was created
        let id = rand::random::<SearchId>();
        transport
            .write(Response::new(
                req.id.clone(),
                DistantResponseData::SearchStarted { id },
            ))
            .await
            .unwrap();

        // Get the searcher
        let mut searcher = search_task.await.unwrap().unwrap();

        // Send some matches related to the file
        transport
            .write(Response::new(
                req.id,
                vec![
                    DistantResponseData::SearchResults {
                        id,
                        matches: vec![
                            SearchQueryMatch::Path(SearchQueryPathMatch {
                                path: PathBuf::from("/some/path/1"),
                                submatches: vec![SearchQuerySubmatch {
                                    r#match: SearchQueryMatchData::Text("test match".to_string()),
                                    start: 3,
                                    end: 7,
                                }],
                            }),
                            SearchQueryMatch::Path(SearchQueryPathMatch {
                                path: PathBuf::from("/some/path/2"),
                                submatches: vec![SearchQuerySubmatch {
                                    r#match: SearchQueryMatchData::Text("test match 2".to_string()),
                                    start: 88,
                                    end: 99,
                                }],
                            }),
                        ],
                    },
                    DistantResponseData::SearchResults {
                        id,
                        matches: vec![SearchQueryMatch::Path(SearchQueryPathMatch {
                            path: PathBuf::from("/some/path/3"),
                            submatches: vec![SearchQuerySubmatch {
                                r#match: SearchQueryMatchData::Text("test match 3".to_string()),
                                start: 5,
                                end: 9,
                            }],
                        })],
                    },
                ],
            ))
            .await
            .unwrap();

        // Verify that the searcher gets the matches, one at a time
        let m = searcher.next().await.expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/1"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match".to_string()),
                    start: 3,
                    end: 7,
                }],
            })
        );

        let m = searcher.next().await.expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/2"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match 2".to_string()),
                    start: 88,
                    end: 99,
                }],
            }),
        );

        let m = searcher.next().await.expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/3"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match 3".to_string()),
                    start: 5,
                    end: 9,
                }],
            })
        );
    }

    #[tokio::test]
    async fn searcher_should_distinguish_match_events_and_only_receive_matches_for_itself() {
        let (mut transport, session) = make_session();

        let test_query = SearchQuery {
            path: PathBuf::from("/some/test/path"),
            target: SearchQueryTarget::Path,
            condition: SearchQueryCondition::Regex {
                value: String::from("."),
            },
            options: SearchQueryOptions::default(),
        };

        // Create a task for searcher as we need to handle the request and a response
        // in a separate async block
        let search_task =
            tokio::spawn(
                async move { Searcher::search(session.clone_channel(), test_query).await },
            );

        // Wait until we get the request from the session
        let req: Request<DistantRequestData> = transport.read().await.unwrap().unwrap();

        // Send back an acknowledgement that a searcher was created
        let id = rand::random();
        transport
            .write(Response::new(
                req.id.clone(),
                DistantResponseData::SearchStarted { id },
            ))
            .await
            .unwrap();

        // Get the searcher
        let mut searcher = search_task.await.unwrap().unwrap();

        // Send a match from the appropriate origin
        transport
            .write(Response::new(
                req.id.clone(),
                DistantResponseData::SearchResults {
                    id,
                    matches: vec![SearchQueryMatch::Path(SearchQueryPathMatch {
                        path: PathBuf::from("/some/path/1"),
                        submatches: vec![SearchQuerySubmatch {
                            r#match: SearchQueryMatchData::Text("test match".to_string()),
                            start: 3,
                            end: 7,
                        }],
                    })],
                },
            ))
            .await
            .unwrap();

        // Send a match from a different origin
        transport
            .write(Response::new(
                req.id.clone() + "1",
                DistantResponseData::SearchResults {
                    id,
                    matches: vec![SearchQueryMatch::Path(SearchQueryPathMatch {
                        path: PathBuf::from("/some/path/2"),
                        submatches: vec![SearchQuerySubmatch {
                            r#match: SearchQueryMatchData::Text("test match 2".to_string()),
                            start: 88,
                            end: 99,
                        }],
                    })],
                },
            ))
            .await
            .unwrap();

        // Send a match from the appropriate origin
        transport
            .write(Response::new(
                req.id,
                DistantResponseData::SearchResults {
                    id,
                    matches: vec![SearchQueryMatch::Path(SearchQueryPathMatch {
                        path: PathBuf::from("/some/path/3"),
                        submatches: vec![SearchQuerySubmatch {
                            r#match: SearchQueryMatchData::Text("test match 3".to_string()),
                            start: 5,
                            end: 9,
                        }],
                    })],
                },
            ))
            .await
            .unwrap();

        // Verify that the searcher gets the matches, one at a time
        let m = searcher.next().await.expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/1"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match".to_string()),
                    start: 3,
                    end: 7,
                }],
            })
        );

        let m = searcher.next().await.expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/3"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match 3".to_string()),
                    start: 5,
                    end: 9,
                }],
            })
        );
    }

    #[tokio::test]
    async fn searcher_should_stop_receiving_events_if_cancelled() {
        let (mut transport, session) = make_session();

        let test_query = SearchQuery {
            path: PathBuf::from("/some/test/path"),
            target: SearchQueryTarget::Path,
            condition: SearchQueryCondition::Regex {
                value: String::from("."),
            },
            options: SearchQueryOptions::default(),
        };

        // Create a task for searcher as we need to handle the request and a response
        // in a separate async block
        let search_task =
            tokio::spawn(
                async move { Searcher::search(session.clone_channel(), test_query).await },
            );

        // Wait until we get the request from the session
        let req: Request<DistantRequestData> = transport.read().await.unwrap().unwrap();

        // Send back an acknowledgement that a searcher was created
        let id = rand::random::<SearchId>();
        transport
            .write(Response::new(
                req.id.clone(),
                DistantResponseData::SearchStarted { id },
            ))
            .await
            .unwrap();

        // Send some matches from the appropriate origin
        transport
            .write(Response::new(
                req.id,
                DistantResponseData::SearchResults {
                    id,
                    matches: vec![
                        SearchQueryMatch::Path(SearchQueryPathMatch {
                            path: PathBuf::from("/some/path/1"),
                            submatches: vec![SearchQuerySubmatch {
                                r#match: SearchQueryMatchData::Text("test match".to_string()),
                                start: 3,
                                end: 7,
                            }],
                        }),
                        SearchQueryMatch::Path(SearchQueryPathMatch {
                            path: PathBuf::from("/some/path/2"),
                            submatches: vec![SearchQuerySubmatch {
                                r#match: SearchQueryMatchData::Text("test match 2".to_string()),
                                start: 88,
                                end: 99,
                            }],
                        }),
                    ],
                },
            ))
            .await
            .unwrap();

        // Wait a little bit for all matches to be queued
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;

        // Create a task for cancelling as we need to handle the request and a response
        // in a separate async block
        let searcher = Arc::new(Mutex::new(search_task.await.unwrap().unwrap()));

        // Verify that the searcher gets the first match
        let m = searcher
            .lock()
            .await
            .next()
            .await
            .expect("Searcher closed unexpectedly");
        assert_eq!(
            m,
            SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/1"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match".to_string()),
                    start: 3,
                    end: 7,
                }],
            }),
        );

        // Cancel the search, verify the request is sent out, and respond with ok
        let searcher_2 = Arc::clone(&searcher);
        let cancel_task = tokio::spawn(async move { searcher_2.lock().await.cancel().await });

        let req: Request<DistantRequestData> = transport.read().await.unwrap().unwrap();

        transport
            .write(Response::new(req.id.clone(), DistantResponseData::Ok))
            .await
            .unwrap();

        // Wait for the cancel to complete
        cancel_task.await.unwrap().unwrap();

        // Send a match that will get ignored
        transport
            .write(Response::new(
                req.id,
                DistantResponseData::SearchResults {
                    id,
                    matches: vec![SearchQueryMatch::Path(SearchQueryPathMatch {
                        path: PathBuf::from("/some/path/3"),
                        submatches: vec![SearchQuerySubmatch {
                            r#match: SearchQueryMatchData::Text("test match 3".to_string()),
                            start: 5,
                            end: 9,
                        }],
                    })],
                },
            ))
            .await
            .unwrap();

        // Verify that we get any remaining matches that were received before cancel,
        // but nothing new after that
        assert_eq!(
            searcher.lock().await.next().await,
            Some(SearchQueryMatch::Path(SearchQueryPathMatch {
                path: PathBuf::from("/some/path/2"),
                submatches: vec![SearchQuerySubmatch {
                    r#match: SearchQueryMatchData::Text("test match 2".to_string()),
                    start: 88,
                    end: 99,
                }],
            }))
        );
        assert_eq!(searcher.lock().await.next().await, None);
    }
}
|
@ -0,0 +1,335 @@
|
|||||||
|
use super::FileType;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::{borrow::Cow, collections::HashSet, path::PathBuf, str::FromStr};
|
||||||
|
|
||||||
|
/// Id associated with a search
///
/// Assigned by the server when the search starts, and used afterwards to
/// route results to the right searcher and to cancel the search
pub type SearchId = u32;
|
||||||
|
|
||||||
|
/// Represents a query to perform against the filesystem
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuery {
    /// Path in which to perform the query
    pub path: PathBuf,

    /// Kind of data to examine using conditions
    pub target: SearchQueryTarget,

    /// Condition to meet to be considered a match
    pub condition: SearchQueryCondition,

    /// Options to apply to the query
    #[serde(default)]
    pub options: SearchQueryOptions,
}

#[cfg(feature = "schemars")]
impl SearchQuery {
    /// Produces the JSON schema describing [`SearchQuery`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuery)
    }
}

impl FromStr for SearchQuery {
    type Err = serde_json::error::Error;

    /// Parses search query from a JSON string
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(s)
    }
}
|
||||||
|
|
||||||
|
/// Kind of data to examine using conditions
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case")]
pub enum SearchQueryTarget {
    /// Checks path of file, directory, or symlink
    Path,

    /// Checks contents of files
    Contents,
}

#[cfg(feature = "schemars")]
impl SearchQueryTarget {
    /// Produces the JSON schema describing [`SearchQueryTarget`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryTarget)
    }
}
|
||||||
|
|
||||||
|
/// Condition used to find a match in a search query
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
pub enum SearchQueryCondition {
    /// Ends with some text
    EndsWith { value: String },

    /// Matches some text exactly
    Equals { value: String },

    /// Matches some regex
    Regex { value: String },

    /// Begins with some text
    StartsWith { value: String },
}
|
||||||
|
|
||||||
|
impl SearchQueryCondition {
|
||||||
|
/// Creates a new instance with `EndsWith` variant
|
||||||
|
pub fn ends_with(value: impl Into<String>) -> Self {
|
||||||
|
Self::EndsWith {
|
||||||
|
value: value.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new instance with `Equals` variant
|
||||||
|
pub fn equals(value: impl Into<String>) -> Self {
|
||||||
|
Self::Equals {
|
||||||
|
value: value.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new instance with `Regex` variant
|
||||||
|
pub fn regex(value: impl Into<String>) -> Self {
|
||||||
|
Self::Regex {
|
||||||
|
value: value.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new instance with `StartsWith` variant
|
||||||
|
pub fn starts_with(value: impl Into<String>) -> Self {
|
||||||
|
Self::StartsWith {
|
||||||
|
value: value.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts the condition in a regex string
|
||||||
|
pub fn to_regex_string(&self) -> String {
|
||||||
|
match self {
|
||||||
|
Self::EndsWith { value } => format!(r"{value}$"),
|
||||||
|
Self::Equals { value } => format!(r"^{value}$"),
|
||||||
|
Self::Regex { value } => value.to_string(),
|
||||||
|
Self::StartsWith { value } => format!(r"^{value}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
|
||||||
|
impl SearchQueryCondition {
|
||||||
|
pub fn root_schema() -> schemars::schema::RootSchema {
|
||||||
|
schemars::schema_for!(SearchQueryCondition)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Options associated with a search query
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryOptions {
    /// Restrict search to only these file types (otherwise all are allowed)
    #[serde(default)]
    pub allowed_file_types: HashSet<FileType>,

    /// Regex to use to filter paths being searched to only those that match the include condition
    #[serde(default)]
    pub include: Option<SearchQueryCondition>,

    /// Regex to use to filter paths being searched to only those that do not match the exclude
    /// condition
    #[serde(default)]
    pub exclude: Option<SearchQueryCondition>,

    /// Search should follow symbolic links
    #[serde(default)]
    pub follow_symbolic_links: bool,

    /// Maximum results to return before stopping the query
    #[serde(default)]
    pub limit: Option<u64>,

    /// Minimum depth (directories) to search
    ///
    /// The smallest depth is 0 and always corresponds to the path given to the new function on
    /// this type. Its direct descendants have depth 1, and their descendants have depth 2, and so
    /// on.
    #[serde(default)]
    pub min_depth: Option<u64>,

    /// Maximum depth (directories) to search
    ///
    /// The smallest depth is 0 and always corresponds to the path given to the new function on
    /// this type. Its direct descendants have depth 1, and their descendants have depth 2, and so
    /// on.
    ///
    /// Note that this will not simply filter the entries of the iterator, but it will actually
    /// avoid descending into directories when the depth is exceeded.
    #[serde(default)]
    pub max_depth: Option<u64>,

    /// Amount of results to batch before sending back excluding final submission that will always
    /// include the remaining results even if less than pagination request
    #[serde(default)]
    pub pagination: Option<u64>,
}

#[cfg(feature = "schemars")]
impl SearchQueryOptions {
    /// Produces the JSON schema describing [`SearchQueryOptions`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryOptions)
    }
}
|
||||||
|
|
||||||
|
/// Represents a match for a search query
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||||
|
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
|
||||||
|
pub enum SearchQueryMatch {
|
||||||
|
/// Matches part of a file's path
|
||||||
|
Path(SearchQueryPathMatch),
|
||||||
|
|
||||||
|
/// Matches part of a file's contents
|
||||||
|
Contents(SearchQueryContentsMatch),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SearchQueryMatch {
|
||||||
|
pub fn into_path_match(self) -> Option<SearchQueryPathMatch> {
|
||||||
|
match self {
|
||||||
|
Self::Path(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_contents_match(self) -> Option<SearchQueryContentsMatch> {
|
||||||
|
match self {
|
||||||
|
Self::Contents(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
|
||||||
|
impl SearchQueryMatch {
|
||||||
|
pub fn root_schema() -> schemars::schema::RootSchema {
|
||||||
|
schemars::schema_for!(SearchQueryMatch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents details for a match on a path
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryPathMatch {
    /// Path associated with the match
    pub path: PathBuf,

    /// Collection of matches tied to `path` where each submatch's byte offset is relative to
    /// `path`
    pub submatches: Vec<SearchQuerySubmatch>,
}

#[cfg(feature = "schemars")]
impl SearchQueryPathMatch {
    /// Produces the JSON schema describing [`SearchQueryPathMatch`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryPathMatch)
    }
}
|
||||||
|
|
||||||
|
/// Represents details for a match on a file's contents
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryContentsMatch {
    /// Path to file whose contents match
    pub path: PathBuf,

    /// Line(s) that matched
    // NOTE(review): per the JSON integration test below, this includes the line's
    // trailing newline (e.g. "textual\n") — confirm against the server implementation
    pub lines: SearchQueryMatchData,

    /// Line number where match starts (base index 1)
    pub line_number: u64,

    /// Absolute byte offset corresponding to the start of `lines` in the data being searched
    pub absolute_offset: u64,

    /// Collection of matches tied to `lines` where each submatch's byte offset is relative to
    /// `lines` and not the overall content
    pub submatches: Vec<SearchQuerySubmatch>,
}
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
impl SearchQueryContentsMatch {
    /// Produces the JSON schema describing [`SearchQueryContentsMatch`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryContentsMatch)
    }
}
|
||||||
|
|
||||||
|
/// Represents an individual submatch within a path or contents match,
/// identified by the matched data and its byte span
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuerySubmatch {
    /// Content matched by query
    pub r#match: SearchQueryMatchData,

    /// Byte offset representing start of submatch (inclusive)
    pub start: u64,

    /// Byte offset representing end of submatch (exclusive)
    pub end: u64,
}
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
impl SearchQuerySubmatch {
    /// Produces the JSON schema describing [`SearchQuerySubmatch`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuerySubmatch)
    }
}
|
||||||
|
|
||||||
|
/// Represents the data tied to a match or submatch, either as UTF-8 text
/// or as raw bytes (for content that is not valid UTF-8)
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
// Adjacently tagged: serializes as {"type": "text"|"bytes", "value": ...}
#[serde(
    rename_all = "snake_case",
    deny_unknown_fields,
    tag = "type",
    content = "value"
)]
pub enum SearchQueryMatchData {
    /// Match represented as UTF-8 text
    Text(String),

    /// Match represented as bytes
    Bytes(Vec<u8>),
}
|
||||||
|
|
||||||
|
impl SearchQueryMatchData {
|
||||||
|
/// Creates a new instance with `Text` variant
|
||||||
|
pub fn text(value: impl Into<String>) -> Self {
|
||||||
|
Self::Text(value.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new instance with `Bytes` variant
|
||||||
|
pub fn bytes(value: impl Into<Vec<u8>>) -> Self {
|
||||||
|
Self::Bytes(value.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the UTF-8 str reference to the data, if is valid UTF-8
|
||||||
|
pub fn to_str(&self) -> Option<&str> {
|
||||||
|
match self {
|
||||||
|
Self::Text(x) => Some(x),
|
||||||
|
Self::Bytes(x) => std::str::from_utf8(x).ok(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts data to a UTF-8 string, replacing any invalid UTF-8 sequences with
|
||||||
|
/// [`U+FFFD REPLACEMENT CHARACTER`](https://doc.rust-lang.org/nightly/core/char/const.REPLACEMENT_CHARACTER.html)
|
||||||
|
pub fn to_string_lossy(&self) -> Cow<'_, str> {
|
||||||
|
match self {
|
||||||
|
Self::Text(x) => Cow::Borrowed(x),
|
||||||
|
Self::Bytes(x) => String::from_utf8_lossy(x),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
impl SearchQueryMatchData {
    /// Produces the JSON schema describing [`SearchQueryMatchData`]
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryMatchData)
    }
}
|
@ -0,0 +1,62 @@
|
|||||||
|
use crate::cli::fixtures::*;
|
||||||
|
use assert_cmd::Command;
|
||||||
|
use assert_fs::prelude::*;
|
||||||
|
use indoc::indoc;
|
||||||
|
use predicates::Predicate;
|
||||||
|
use rstest::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
// Expected stdout after the per-file result blocks have been sorted: one block
// per file, each match line prefixed with its 1-based line number. The
// `.*?[\\/]` prefix tolerates both Unix and Windows path separators.
const SEARCH_RESULTS_REGEX: &str = indoc! {r"
.*?[\\/]file1.txt
1:some file text

.*?[\\/]file2.txt
3:textual

.*?[\\/]file3.txt
1:more content
"};
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn should_search_filesystem_using_query(mut action_cmd: CtxCommand<Command>) {
|
||||||
|
let root = assert_fs::TempDir::new().unwrap();
|
||||||
|
root.child("file1.txt").write_str("some file text").unwrap();
|
||||||
|
root.child("file2.txt")
|
||||||
|
.write_str("lines\nof\ntextual\ninformation")
|
||||||
|
.unwrap();
|
||||||
|
root.child("file3.txt").write_str("more content").unwrap();
|
||||||
|
|
||||||
|
let query = json!({
|
||||||
|
"path": root.path().to_string_lossy(),
|
||||||
|
"target": "contents",
|
||||||
|
"condition": {"type": "regex", "value": "te[a-z]*\\b"},
|
||||||
|
});
|
||||||
|
|
||||||
|
let stdout_predicate_fn = predicates::function::function(|s: &[u8]| {
|
||||||
|
let s = std::str::from_utf8(s).unwrap();
|
||||||
|
|
||||||
|
// Split by empty line, sort, and then rejoin with empty line inbetween
|
||||||
|
let mut lines = s
|
||||||
|
.split("\n\n")
|
||||||
|
.map(|lines| lines.trim_end())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
lines.sort_unstable();
|
||||||
|
|
||||||
|
// Put together sorted text lines
|
||||||
|
let full_text = lines.join("\n\n");
|
||||||
|
|
||||||
|
// Verify that it matches our search results regex
|
||||||
|
let regex_fn = predicates::str::is_match(SEARCH_RESULTS_REGEX).unwrap();
|
||||||
|
|
||||||
|
regex_fn.eval(&full_text)
|
||||||
|
});
|
||||||
|
|
||||||
|
// distant action system-info
|
||||||
|
action_cmd
|
||||||
|
.arg("search")
|
||||||
|
.arg(&serde_json::to_string(&query).unwrap())
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(stdout_predicate_fn)
|
||||||
|
.stderr("");
|
||||||
|
}
|
@ -0,0 +1,82 @@
|
|||||||
|
use crate::cli::fixtures::*;
|
||||||
|
use assert_fs::prelude::*;
|
||||||
|
use rstest::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
/// Verifies the JSON repl search flow: a `search` request yields a
/// `search_started` confirmation, then `search_results`, then `search_done`.
/// Only `file2.txt` contains the pattern "ua" (in "textual"), so exactly one
/// match is expected.
#[rstest]
#[tokio::test]
async fn should_support_json_search_filesystem_using_query(mut json_repl: CtxCommand<Repl>) {
    // Populate a temporary directory with files to search through
    let root = assert_fs::TempDir::new().unwrap();
    root.child("file1.txt").write_str("some file text").unwrap();
    root.child("file2.txt")
        .write_str("lines\nof\ntextual\ninformation")
        .unwrap();
    root.child("file3.txt").write_str("more content").unwrap();

    // Random origin id distinguishes this request's responses in the repl
    let id = rand::random::<u64>().to_string();
    let req = json!({
        "id": id,
        "payload": {
            "type": "search",
            "query": {
                "path": root.path().to_string_lossy(),
                "target": "contents",
                "condition": {"type": "regex", "value": "ua"},
            },
        },
    });

    // Submit search request and get back started confirmation
    let res = json_repl.write_and_read_json(req).await.unwrap().unwrap();

    // Get id from started confirmation
    assert_eq!(res["origin_id"], id);
    assert_eq!(res["payload"]["type"], "search_started");
    let search_id = res["payload"]["id"]
        .as_u64()
        .expect("id missing or not number");

    // Get search results back
    // ("ua" starts at byte 4 of "textual\n", 1-based line 3, absolute offset 9
    // = len("lines\nof\n"))
    let res = json_repl.read_json_from_stdout().await.unwrap().unwrap();
    assert_eq!(res["origin_id"], id);
    assert_eq!(
        res["payload"],
        json!({
            "type": "search_results",
            "id": search_id,
            "matches": [
                {
                    "type": "contents",
                    "path": root.child("file2.txt").to_string_lossy(),
                    "lines": {
                        "type": "text",
                        "value": "textual\n",
                    },
                    "line_number": 3,
                    "absolute_offset": 9,
                    "submatches": [
                        {
                            "match": {
                                "type": "text",
                                "value": "ua",
                            },
                            "start": 4,
                            "end": 6,
                        }
                    ],
                },
            ]
        })
    );

    // Get search completion confirmation
    let res = json_repl.read_json_from_stdout().await.unwrap().unwrap();
    assert_eq!(res["origin_id"], id);
    assert_eq!(
        res["payload"],
        json!({
            "type": "search_done",
            "id": search_id,
        })
    );
}
|
Loading…
Reference in New Issue