mirror of https://github.com/chipsenkbeil/distant
Compare commits
No commits in common. 'master' and 'v0.20.0-alpha.6' have entirely different histories.
master
...
v0.20.0-al
@ -1,24 +0,0 @@
|
||||
name: 'Tag latest'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
action:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Tag latest and push
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
run: |
|
||||
git config user.name "${GITHUB_ACTOR}"
|
||||
git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
|
||||
|
||||
origin_url="$(git config --get remote.origin.url)"
|
||||
origin_url="${origin_url/#https:\/\//https:\/\/$GITHUB_TOKEN@}" # add token to URL
|
||||
|
||||
git tag latest --force
|
||||
git push "$origin_url" --tags --force
|
@ -1,28 +0,0 @@
|
||||
name: 'Lock Threads'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 3 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
concurrency:
|
||||
group: lock
|
||||
|
||||
jobs:
|
||||
action:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@v4
|
||||
with:
|
||||
issue-inactive-days: '30'
|
||||
issue-comment: >
|
||||
I'm going to lock this issue because it has been closed for _30 days_ ⏳.
|
||||
This helps our maintainers find and focus on the active issues.
|
||||
If you have found a problem that seems similar to this, please open a new
|
||||
issue and complete the issue template so we can capture all the details
|
||||
necessary to investigate further.
|
||||
process-only: 'issues'
|
@ -1,44 +0,0 @@
|
||||
[tasks.format]
|
||||
clear = true
|
||||
install_crate = "rustfmt-nightly"
|
||||
command = "cargo"
|
||||
args = ["+nightly", "fmt", "--all"]
|
||||
|
||||
[tasks.test]
|
||||
clear = true
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--all-features", "--workspace"]
|
||||
|
||||
[tasks.ci-test]
|
||||
clear = true
|
||||
command = "cargo"
|
||||
args = ["nextest", "run", "--profile", "ci", "--release", "--all-features", "--workspace"]
|
||||
|
||||
[tasks.post-ci-test]
|
||||
clear = true
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--all-features", "--workspace", "--doc"]
|
||||
|
||||
[tasks.publish]
|
||||
clear = true
|
||||
script = '''
|
||||
cargo publish --all-features -p distant-auth
|
||||
cargo publish --all-features -p distant-protocol
|
||||
cargo publish --all-features -p distant-net
|
||||
cargo publish --all-features -p distant-core
|
||||
cargo publish --all-features -p distant-local
|
||||
cargo publish --all-features -p distant-ssh2
|
||||
cargo publish --all-features
|
||||
'''
|
||||
|
||||
[tasks.dry-run-publish]
|
||||
clear = true
|
||||
script = '''
|
||||
cargo publish --all-features --dry-run -p distant-auth
|
||||
cargo publish --all-features --dry-run -p distant-protocol
|
||||
cargo publish --all-features --dry-run -p distant-net
|
||||
cargo publish --all-features --dry-run -p distant-core
|
||||
cargo publish --all-features --dry-run -p distant-local
|
||||
cargo publish --all-features --dry-run -p distant-ssh2
|
||||
cargo publish --all-features --dry-run
|
||||
'''
|
@ -1,27 +0,0 @@
|
||||
[package]
|
||||
name = "distant-auth"
|
||||
description = "Authentication library for distant, providing various implementations"
|
||||
categories = ["authentication"]
|
||||
keywords = ["auth", "authentication", "async"]
|
||||
version = "0.20.0"
|
||||
authors = ["Chip Senkbeil <chip@senkbeil.org>"]
|
||||
edition = "2021"
|
||||
homepage = "https://github.com/chipsenkbeil/distant"
|
||||
repository = "https://github.com/chipsenkbeil/distant"
|
||||
readme = "README.md"
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
tests = []
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.68"
|
||||
derive_more = { version = "0.99.17", default-features = false, features = ["display", "from", "error"] }
|
||||
log = "0.4.18"
|
||||
serde = { version = "1.0.163", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.10.0"
|
||||
test-log = "0.2.11"
|
||||
tokio = { version = "1.28.2", features = ["full"] }
|
@ -1,35 +0,0 @@
|
||||
# distant auth
|
||||
|
||||
[![Crates.io][distant_crates_img]][distant_crates_lnk] [![Docs.rs][distant_doc_img]][distant_doc_lnk] [![Rustc 1.70.0][distant_rustc_img]][distant_rustc_lnk]
|
||||
|
||||
[distant_crates_img]: https://img.shields.io/crates/v/distant-auth.svg
|
||||
[distant_crates_lnk]: https://crates.io/crates/distant-auth
|
||||
[distant_doc_img]: https://docs.rs/distant-auth/badge.svg
|
||||
[distant_doc_lnk]: https://docs.rs/distant-auth
|
||||
[distant_rustc_img]: https://img.shields.io/badge/distant_auth-rustc_1.70+-lightgray.svg
|
||||
[distant_rustc_lnk]: https://blog.rust-lang.org/2023/06/01/Rust-1.70.0.html
|
||||
|
||||
## Details
|
||||
|
||||
The `distant-auth` library supplies the authentication functionality for the
|
||||
distant interfaces and distant cli.
|
||||
|
||||
## Installation
|
||||
|
||||
You can import the dependency by adding the following to your `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
distant-auth = "0.20"
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
Apache License, Version 2.0, (LICENSE-APACHE or
|
||||
[apache-license][apache-license]) MIT license (LICENSE-MIT or
|
||||
[mit-license][mit-license]) at your option.
|
||||
|
||||
[apache-license]: http://www.apache.org/licenses/LICENSE-2.0
|
||||
[mit-license]: http://opensource.org/licenses/MIT
|
@ -1,110 +0,0 @@
|
||||
use std::io;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::handler::AuthHandler;
|
||||
use crate::msg::*;
|
||||
|
||||
/// Represents an interface for authenticating with a server.
|
||||
#[async_trait]
|
||||
pub trait Authenticate {
|
||||
/// Performs authentication by leveraging the `handler` for any received challenge.
|
||||
async fn authenticate(&mut self, mut handler: impl AuthHandler + Send) -> io::Result<()>;
|
||||
}
|
||||
|
||||
/// Represents an interface for submitting challenges for authentication.
|
||||
#[async_trait]
|
||||
pub trait Authenticator: Send {
|
||||
/// Issues an initialization notice and returns the response indicating which authentication
|
||||
/// methods to pursue
|
||||
async fn initialize(
|
||||
&mut self,
|
||||
initialization: Initialization,
|
||||
) -> io::Result<InitializationResponse>;
|
||||
|
||||
/// Issues a challenge and returns the answers to the `questions` asked.
|
||||
async fn challenge(&mut self, challenge: Challenge) -> io::Result<ChallengeResponse>;
|
||||
|
||||
/// Requests verification of some `kind` and `text`, returning true if passed verification.
|
||||
async fn verify(&mut self, verification: Verification) -> io::Result<VerificationResponse>;
|
||||
|
||||
/// Reports information with no response expected.
|
||||
async fn info(&mut self, info: Info) -> io::Result<()>;
|
||||
|
||||
/// Reports an error occurred during authentication, consuming the authenticator since no more
|
||||
/// challenges should be issued.
|
||||
async fn error(&mut self, error: Error) -> io::Result<()>;
|
||||
|
||||
/// Reports that the authentication has started for a specific method.
|
||||
async fn start_method(&mut self, start_method: StartMethod) -> io::Result<()>;
|
||||
|
||||
/// Reports that the authentication has finished successfully, consuming the authenticator
|
||||
/// since no more challenges should be issued.
|
||||
async fn finished(&mut self) -> io::Result<()>;
|
||||
}
|
||||
|
||||
/// Represents an implementator of [`Authenticator`] used purely for testing purposes.
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
pub struct TestAuthenticator {
|
||||
pub initialize: Box<dyn FnMut(Initialization) -> io::Result<InitializationResponse> + Send>,
|
||||
pub challenge: Box<dyn FnMut(Challenge) -> io::Result<ChallengeResponse> + Send>,
|
||||
pub verify: Box<dyn FnMut(Verification) -> io::Result<VerificationResponse> + Send>,
|
||||
pub info: Box<dyn FnMut(Info) -> io::Result<()> + Send>,
|
||||
pub error: Box<dyn FnMut(Error) -> io::Result<()> + Send>,
|
||||
pub start_method: Box<dyn FnMut(StartMethod) -> io::Result<()> + Send>,
|
||||
pub finished: Box<dyn FnMut() -> io::Result<()> + Send>,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
impl Default for TestAuthenticator {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
initialize: Box::new(|x| Ok(InitializationResponse { methods: x.methods })),
|
||||
challenge: Box::new(|x| {
|
||||
Ok(ChallengeResponse {
|
||||
answers: x.questions.into_iter().map(|x| x.text).collect(),
|
||||
})
|
||||
}),
|
||||
verify: Box::new(|_| Ok(VerificationResponse { valid: true })),
|
||||
info: Box::new(|_| Ok(())),
|
||||
error: Box::new(|_| Ok(())),
|
||||
start_method: Box::new(|_| Ok(())),
|
||||
finished: Box::new(|| Ok(())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
#[async_trait]
|
||||
impl Authenticator for TestAuthenticator {
|
||||
async fn initialize(
|
||||
&mut self,
|
||||
initialization: Initialization,
|
||||
) -> io::Result<InitializationResponse> {
|
||||
(self.initialize)(initialization)
|
||||
}
|
||||
|
||||
async fn challenge(&mut self, challenge: Challenge) -> io::Result<ChallengeResponse> {
|
||||
(self.challenge)(challenge)
|
||||
}
|
||||
|
||||
async fn verify(&mut self, verification: Verification) -> io::Result<VerificationResponse> {
|
||||
(self.verify)(verification)
|
||||
}
|
||||
|
||||
async fn info(&mut self, info: Info) -> io::Result<()> {
|
||||
(self.info)(info)
|
||||
}
|
||||
|
||||
async fn error(&mut self, error: Error) -> io::Result<()> {
|
||||
(self.error)(error)
|
||||
}
|
||||
|
||||
async fn start_method(&mut self, start_method: StartMethod) -> io::Result<()> {
|
||||
(self.start_method)(start_method)
|
||||
}
|
||||
|
||||
async fn finished(&mut self) -> io::Result<()> {
|
||||
(self.finished)()
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
|
||||
#[doc = include_str!("../README.md")]
|
||||
#[cfg(doctest)]
|
||||
pub struct ReadmeDoctests;
|
||||
|
||||
mod authenticator;
|
||||
mod handler;
|
||||
mod methods;
|
||||
pub mod msg;
|
||||
|
||||
pub use authenticator::*;
|
||||
pub use handler::*;
|
||||
pub use methods::*;
|
||||
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
pub mod tests {
|
||||
pub use crate::{TestAuthHandler, TestAuthenticator};
|
||||
}
|
@ -1,133 +0,0 @@
|
||||
use std::io;
|
||||
use std::str::FromStr;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::authenticator::Authenticator;
|
||||
use crate::methods::AuthenticationMethod;
|
||||
use crate::msg::{Challenge, Error, Question};
|
||||
|
||||
/// Authenticaton method for a static secret key
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct StaticKeyAuthenticationMethod<T> {
|
||||
key: T,
|
||||
}
|
||||
|
||||
impl<T> StaticKeyAuthenticationMethod<T> {
|
||||
pub const ID: &str = "static_key";
|
||||
|
||||
#[inline]
|
||||
pub fn new(key: T) -> Self {
|
||||
Self { key }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<T> AuthenticationMethod for StaticKeyAuthenticationMethod<T>
|
||||
where
|
||||
T: FromStr + PartialEq + Send + Sync,
|
||||
{
|
||||
fn id(&self) -> &'static str {
|
||||
Self::ID
|
||||
}
|
||||
|
||||
async fn authenticate(&self, authenticator: &mut dyn Authenticator) -> io::Result<()> {
|
||||
let response = authenticator
|
||||
.challenge(Challenge {
|
||||
questions: vec![Question {
|
||||
label: "key".to_string(),
|
||||
text: "Provide a key: ".to_string(),
|
||||
options: Default::default(),
|
||||
}],
|
||||
options: Default::default(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
if response.answers.is_empty() {
|
||||
return Err(Error::non_fatal("missing answer").into_io_permission_denied());
|
||||
}
|
||||
|
||||
match response.answers.into_iter().next().unwrap().parse::<T>() {
|
||||
Ok(key) if key == self.key => Ok(()),
|
||||
_ => Err(Error::non_fatal("answer does not match key").into_io_permission_denied()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use test_log::test;
|
||||
|
||||
use super::*;
|
||||
use crate::authenticator::TestAuthenticator;
|
||||
use crate::msg::*;
|
||||
|
||||
#[test(tokio::test)]
|
||||
async fn authenticate_should_fail_if_key_challenge_fails() {
|
||||
let method = StaticKeyAuthenticationMethod::new(String::new());
|
||||
|
||||
let mut authenticator = TestAuthenticator {
|
||||
challenge: Box::new(|_| Err(io::Error::new(io::ErrorKind::InvalidData, "test error"))),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let err = method.authenticate(&mut authenticator).await.unwrap_err();
|
||||
|
||||
assert_eq!(err.kind(), io::ErrorKind::InvalidData);
|
||||
assert_eq!(err.to_string(), "test error");
|
||||
}
|
||||
|
||||
#[test(tokio::test)]
|
||||
async fn authenticate_should_fail_if_no_answer_included_in_challenge_response() {
|
||||
let method = StaticKeyAuthenticationMethod::new(String::new());
|
||||
|
||||
let mut authenticator = TestAuthenticator {
|
||||
challenge: Box::new(|_| {
|
||||
Ok(ChallengeResponse {
|
||||
answers: Vec::new(),
|
||||
})
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let err = method.authenticate(&mut authenticator).await.unwrap_err();
|
||||
|
||||
assert_eq!(err.kind(), io::ErrorKind::PermissionDenied);
|
||||
assert_eq!(err.to_string(), "Error: missing answer");
|
||||
}
|
||||
|
||||
#[test(tokio::test)]
|
||||
async fn authenticate_should_fail_if_answer_does_not_match_key() {
|
||||
let method = StaticKeyAuthenticationMethod::new(String::from("answer"));
|
||||
|
||||
let mut authenticator = TestAuthenticator {
|
||||
challenge: Box::new(|_| {
|
||||
Ok(ChallengeResponse {
|
||||
answers: vec![String::from("other")],
|
||||
})
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let err = method.authenticate(&mut authenticator).await.unwrap_err();
|
||||
|
||||
assert_eq!(err.kind(), io::ErrorKind::PermissionDenied);
|
||||
assert_eq!(err.to_string(), "Error: answer does not match key");
|
||||
}
|
||||
|
||||
#[test(tokio::test)]
|
||||
async fn authenticate_should_succeed_if_answer_matches_key() {
|
||||
let method = StaticKeyAuthenticationMethod::new(String::from("answer"));
|
||||
|
||||
let mut authenticator = TestAuthenticator {
|
||||
challenge: Box::new(|_| {
|
||||
Ok(ChallengeResponse {
|
||||
answers: vec![String::from("answer")],
|
||||
})
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
method.authenticate(&mut authenticator).await.unwrap();
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -1,10 +1,11 @@
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
|
||||
use distant_core::protocol::{ProcessId, PtySize};
|
||||
use tokio::io;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::protocol::{ProcessId, PtySize};
|
||||
|
||||
mod pty;
|
||||
pub use pty::*;
|
||||
|
@ -0,0 +1,553 @@
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use derive_more::{From, IsVariant};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{AsRefStr, EnumDiscriminants, EnumIter, EnumMessage, EnumString};
|
||||
|
||||
mod capabilities;
|
||||
pub use capabilities::*;
|
||||
|
||||
mod change;
|
||||
pub use change::*;
|
||||
|
||||
mod cmd;
|
||||
pub use cmd::*;
|
||||
|
||||
mod error;
|
||||
pub use error::*;
|
||||
|
||||
mod filesystem;
|
||||
pub use filesystem::*;
|
||||
|
||||
mod metadata;
|
||||
pub use metadata::*;
|
||||
|
||||
mod pty;
|
||||
pub use pty::*;
|
||||
|
||||
mod search;
|
||||
pub use search::*;
|
||||
|
||||
mod system;
|
||||
pub use system::*;
|
||||
|
||||
mod utils;
|
||||
pub(crate) use utils::*;
|
||||
|
||||
/// Id for a remote process
|
||||
pub type ProcessId = u32;
|
||||
|
||||
/// Mapping of environment variables
|
||||
pub type Environment = distant_net::common::Map;
|
||||
|
||||
/// Represents a wrapper around a distant message, supporting single and batch requests
|
||||
#[derive(Clone, Debug, From, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[serde(untagged)]
|
||||
pub enum Msg<T> {
|
||||
Single(T),
|
||||
Batch(Vec<T>),
|
||||
}
|
||||
|
||||
impl<T> Msg<T> {
|
||||
/// Returns true if msg has a single payload
|
||||
pub fn is_single(&self) -> bool {
|
||||
matches!(self, Self::Single(_))
|
||||
}
|
||||
|
||||
/// Returns reference to single value if msg is single variant
|
||||
pub fn as_single(&self) -> Option<&T> {
|
||||
match self {
|
||||
Self::Single(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns mutable reference to single value if msg is single variant
|
||||
pub fn as_mut_single(&mut self) -> Option<&T> {
|
||||
match self {
|
||||
Self::Single(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the single value if msg is single variant
|
||||
pub fn into_single(self) -> Option<T> {
|
||||
match self {
|
||||
Self::Single(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if msg has a batch of payloads
|
||||
pub fn is_batch(&self) -> bool {
|
||||
matches!(self, Self::Batch(_))
|
||||
}
|
||||
|
||||
/// Returns reference to batch value if msg is batch variant
|
||||
pub fn as_batch(&self) -> Option<&[T]> {
|
||||
match self {
|
||||
Self::Batch(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns mutable reference to batch value if msg is batch variant
|
||||
pub fn as_mut_batch(&mut self) -> Option<&mut [T]> {
|
||||
match self {
|
||||
Self::Batch(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the batch value if msg is batch variant
|
||||
pub fn into_batch(self) -> Option<Vec<T>> {
|
||||
match self {
|
||||
Self::Batch(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert into a collection of payload data
|
||||
pub fn into_vec(self) -> Vec<T> {
|
||||
match self {
|
||||
Self::Single(x) => vec![x],
|
||||
Self::Batch(x) => x,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
|
||||
impl<T: schemars::JsonSchema> Msg<T> {
|
||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
||||
schemars::schema_for!(Msg<T>)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the payload of a request to be performed on the remote machine
|
||||
#[derive(Clone, Debug, PartialEq, Eq, EnumDiscriminants, IsVariant, Serialize, Deserialize)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[strum_discriminants(derive(
|
||||
AsRefStr,
|
||||
strum::Display,
|
||||
EnumIter,
|
||||
EnumMessage,
|
||||
EnumString,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
IsVariant,
|
||||
Serialize,
|
||||
Deserialize
|
||||
))]
|
||||
#[cfg_attr(
|
||||
feature = "schemars",
|
||||
strum_discriminants(derive(schemars::JsonSchema))
|
||||
)]
|
||||
#[strum_discriminants(name(CapabilityKind))]
|
||||
#[strum_discriminants(strum(serialize_all = "snake_case"))]
|
||||
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
|
||||
pub enum Request {
|
||||
/// Retrieve information about the server's capabilities
|
||||
#[strum_discriminants(strum(message = "Supports retrieving capabilities"))]
|
||||
Capabilities {},
|
||||
|
||||
/// Reads a file from the specified path on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports reading binary file"))]
|
||||
FileRead {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
},
|
||||
|
||||
/// Reads a file from the specified path on the remote machine
|
||||
/// and treats the contents as text
|
||||
#[strum_discriminants(strum(message = "Supports reading text file"))]
|
||||
FileReadText {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
},
|
||||
|
||||
/// Writes a file, creating it if it does not exist, and overwriting any existing content
|
||||
/// on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports writing binary file"))]
|
||||
FileWrite {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Data for server-side writing of content
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Writes a file using text instead of bytes, creating it if it does not exist,
|
||||
/// and overwriting any existing content on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports writing text file"))]
|
||||
FileWriteText {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Data for server-side writing of content
|
||||
text: String,
|
||||
},
|
||||
|
||||
/// Appends to a file, creating it if it does not exist, on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports appending to binary file"))]
|
||||
FileAppend {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Data for server-side writing of content
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Appends text to a file, creating it if it does not exist, on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports appending to text file"))]
|
||||
FileAppendText {
|
||||
/// The path to the file on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Data for server-side writing of content
|
||||
text: String,
|
||||
},
|
||||
|
||||
/// Reads a directory from the specified path on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports reading directory"))]
|
||||
DirRead {
|
||||
/// The path to the directory on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Maximum depth to traverse with 0 indicating there is no maximum
|
||||
/// depth and 1 indicating the most immediate children within the
|
||||
/// directory
|
||||
#[serde(default = "one")]
|
||||
depth: usize,
|
||||
|
||||
/// Whether or not to return absolute or relative paths
|
||||
#[serde(default)]
|
||||
absolute: bool,
|
||||
|
||||
/// Whether or not to canonicalize the resulting paths, meaning
|
||||
/// returning the canonical, absolute form of a path with all
|
||||
/// intermediate components normalized and symbolic links resolved
|
||||
///
|
||||
/// Note that the flag absolute must be true to have absolute paths
|
||||
/// returned, even if canonicalize is flagged as true
|
||||
#[serde(default)]
|
||||
canonicalize: bool,
|
||||
|
||||
/// Whether or not to include the root directory in the retrieved
|
||||
/// entries
|
||||
///
|
||||
/// If included, the root directory will also be a canonicalized,
|
||||
/// absolute path and will not follow any of the other flags
|
||||
#[serde(default)]
|
||||
include_root: bool,
|
||||
},
|
||||
|
||||
/// Creates a directory on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports creating directory"))]
|
||||
DirCreate {
|
||||
/// The path to the directory on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Whether or not to create all parent directories
|
||||
#[serde(default)]
|
||||
all: bool,
|
||||
},
|
||||
|
||||
/// Removes a file or directory on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports removing files, directories, and symlinks"))]
|
||||
Remove {
|
||||
/// The path to the file or directory on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Whether or not to remove all contents within directory if is a directory.
|
||||
/// Does nothing different for files
|
||||
#[serde(default)]
|
||||
force: bool,
|
||||
},
|
||||
|
||||
/// Copies a file or directory on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports copying files, directories, and symlinks"))]
|
||||
Copy {
|
||||
/// The path to the file or directory on the remote machine
|
||||
src: PathBuf,
|
||||
|
||||
/// New location on the remote machine for copy of file or directory
|
||||
dst: PathBuf,
|
||||
},
|
||||
|
||||
/// Moves/renames a file or directory on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports renaming files, directories, and symlinks"))]
|
||||
Rename {
|
||||
/// The path to the file or directory on the remote machine
|
||||
src: PathBuf,
|
||||
|
||||
/// New location on the remote machine for the file or directory
|
||||
dst: PathBuf,
|
||||
},
|
||||
|
||||
/// Watches a path for changes
|
||||
#[strum_discriminants(strum(message = "Supports watching filesystem for changes"))]
|
||||
Watch {
|
||||
/// The path to the file, directory, or symlink on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// If true, will recursively watch for changes within directories, othewise
|
||||
/// will only watch for changes immediately within directories
|
||||
#[serde(default)]
|
||||
recursive: bool,
|
||||
|
||||
/// Filter to only report back specified changes
|
||||
#[serde(default)]
|
||||
only: Vec<ChangeKind>,
|
||||
|
||||
/// Filter to report back changes except these specified changes
|
||||
#[serde(default)]
|
||||
except: Vec<ChangeKind>,
|
||||
},
|
||||
|
||||
/// Unwatches a path for changes, meaning no additional changes will be reported
|
||||
#[strum_discriminants(strum(message = "Supports unwatching filesystem for changes"))]
|
||||
Unwatch {
|
||||
/// The path to the file, directory, or symlink on the remote machine
|
||||
path: PathBuf,
|
||||
},
|
||||
|
||||
/// Checks whether the given path exists
|
||||
#[strum_discriminants(strum(message = "Supports checking if a path exists"))]
|
||||
Exists {
|
||||
/// The path to the file or directory on the remote machine
|
||||
path: PathBuf,
|
||||
},
|
||||
|
||||
/// Retrieves filesystem metadata for the specified path on the remote machine
|
||||
#[strum_discriminants(strum(
|
||||
message = "Supports retrieving metadata about a file, directory, or symlink"
|
||||
))]
|
||||
Metadata {
|
||||
/// The path to the file, directory, or symlink on the remote machine
|
||||
path: PathBuf,
|
||||
|
||||
/// Whether or not to include a canonicalized version of the path, meaning
|
||||
/// returning the canonical, absolute form of a path with all
|
||||
/// intermediate components normalized and symbolic links resolved
|
||||
#[serde(default)]
|
||||
canonicalize: bool,
|
||||
|
||||
/// Whether or not to follow symlinks to determine absolute file type (dir/file)
|
||||
#[serde(default)]
|
||||
resolve_file_type: bool,
|
||||
},
|
||||
|
||||
/// Searches filesystem using the provided query
|
||||
#[strum_discriminants(strum(message = "Supports searching filesystem using queries"))]
|
||||
Search {
|
||||
/// Query to perform against the filesystem
|
||||
query: SearchQuery,
|
||||
},
|
||||
|
||||
/// Cancels an active search being run against the filesystem
|
||||
#[strum_discriminants(strum(
|
||||
message = "Supports canceling an active search against the filesystem"
|
||||
))]
|
||||
CancelSearch {
|
||||
/// Id of the search to cancel
|
||||
id: SearchId,
|
||||
},
|
||||
|
||||
/// Spawns a new process on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports spawning a process"))]
|
||||
ProcSpawn {
|
||||
/// The full command to run including arguments
|
||||
cmd: Cmd,
|
||||
|
||||
/// Environment to provide to the remote process
|
||||
#[serde(default)]
|
||||
environment: Environment,
|
||||
|
||||
/// Alternative current directory for the remote process
|
||||
#[serde(default)]
|
||||
current_dir: Option<PathBuf>,
|
||||
|
||||
/// If provided, will spawn process in a pty, otherwise spawns directly
|
||||
#[serde(default)]
|
||||
pty: Option<PtySize>,
|
||||
},
|
||||
|
||||
/// Kills a process running on the remote machine
|
||||
#[strum_discriminants(strum(message = "Supports killing a spawned process"))]
|
||||
ProcKill {
|
||||
/// Id of the actively-running process
|
||||
id: ProcessId,
|
||||
},
|
||||
|
||||
/// Sends additional data to stdin of running process
|
||||
#[strum_discriminants(strum(message = "Supports sending stdin to a spawned process"))]
|
||||
ProcStdin {
|
||||
/// Id of the actively-running process to send stdin data
|
||||
id: ProcessId,
|
||||
|
||||
/// Data to send to a process's stdin pipe
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Resize pty of remote process
|
||||
#[strum_discriminants(strum(message = "Supports resizing the pty of a spawned process"))]
|
||||
ProcResizePty {
|
||||
/// Id of the actively-running process whose pty to resize
|
||||
id: ProcessId,
|
||||
|
||||
/// The new pty dimensions
|
||||
size: PtySize,
|
||||
},
|
||||
|
||||
/// Retrieve information about the server and the system it is on
|
||||
#[strum_discriminants(strum(message = "Supports retrieving system information"))]
|
||||
SystemInfo {},
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
|
||||
impl Request {
|
||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
||||
schemars::schema_for!(Request)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the payload of a successful response
|
||||
#[derive(Clone, Debug, PartialEq, Eq, AsRefStr, IsVariant, Serialize, Deserialize)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum Response {
|
||||
/// General okay with no extra data, returned in cases like
|
||||
/// creating or removing a directory, copying a file, or renaming
|
||||
/// a file
|
||||
Ok,
|
||||
|
||||
/// General-purpose failure that occurred from some request
|
||||
Error(Error),
|
||||
|
||||
/// Response containing some arbitrary, binary data
|
||||
Blob {
|
||||
/// Binary data associated with the response
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Response containing some arbitrary, text data
|
||||
Text {
|
||||
/// Text data associated with the response
|
||||
data: String,
|
||||
},
|
||||
|
||||
/// Response to reading a directory
|
||||
DirEntries {
|
||||
/// Entries contained within the requested directory
|
||||
entries: Vec<DirEntry>,
|
||||
|
||||
/// Errors encountered while scanning for entries
|
||||
errors: Vec<Error>,
|
||||
},
|
||||
|
||||
/// Response to a filesystem change for some watched file, directory, or symlink
|
||||
Changed(Change),
|
||||
|
||||
/// Response to checking if a path exists
|
||||
Exists { value: bool },
|
||||
|
||||
/// Represents metadata about some filesystem object (file, directory, symlink) on remote machine
|
||||
Metadata(Metadata),
|
||||
|
||||
/// Represents a search being started
|
||||
SearchStarted {
|
||||
/// Arbitrary id associated with search
|
||||
id: SearchId,
|
||||
},
|
||||
|
||||
/// Represents some subset of results for a search query (may not be all of them)
|
||||
SearchResults {
|
||||
/// Arbitrary id associated with search
|
||||
id: SearchId,
|
||||
|
||||
/// Collection of matches from performing a query
|
||||
matches: Vec<SearchQueryMatch>,
|
||||
},
|
||||
|
||||
/// Represents a search being completed
|
||||
SearchDone {
|
||||
/// Arbitrary id associated with search
|
||||
id: SearchId,
|
||||
},
|
||||
|
||||
/// Response to starting a new process
|
||||
ProcSpawned {
|
||||
/// Arbitrary id associated with running process
|
||||
id: ProcessId,
|
||||
},
|
||||
|
||||
/// Actively-transmitted stdout as part of running process
|
||||
ProcStdout {
|
||||
/// Arbitrary id associated with running process
|
||||
id: ProcessId,
|
||||
|
||||
/// Data read from a process' stdout pipe
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Actively-transmitted stderr as part of running process
|
||||
ProcStderr {
|
||||
/// Arbitrary id associated with running process
|
||||
id: ProcessId,
|
||||
|
||||
/// Data read from a process' stderr pipe
|
||||
#[serde(with = "serde_bytes")]
|
||||
#[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
|
||||
data: Vec<u8>,
|
||||
},
|
||||
|
||||
/// Response to a process finishing
|
||||
ProcDone {
|
||||
/// Arbitrary id associated with running process
|
||||
id: ProcessId,
|
||||
|
||||
/// Whether or not termination was successful
|
||||
success: bool,
|
||||
|
||||
/// Exit code associated with termination, will be missing if terminated by signal
|
||||
code: Option<i32>,
|
||||
},
|
||||
|
||||
/// Response to retrieving information about the server and the system it is on
|
||||
SystemInfo(SystemInfo),
|
||||
|
||||
/// Response to retrieving information about the server's capabilities
|
||||
Capabilities { supported: Capabilities },
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl Response {
    /// Produces the JSON schema describing [`Response`], available only when
    /// the `schemars` feature is enabled.
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Response)
    }
}
|
||||
|
||||
impl From<io::Error> for Response {
|
||||
fn from(x: io::Error) -> Self {
|
||||
Self::Error(Error::from(x))
|
||||
}
|
||||
}
|
||||
|
||||
/// Used to provide a default serde value of 1
///
/// NOTE(review): presumably referenced by a `#[serde(default = "one")]`
/// attribute elsewhere in this file — confirm before removing.
const fn one() -> usize {
    1
}
|
@ -0,0 +1,207 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashSet;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::{BitAnd, BitOr, BitXor};
|
||||
use std::str::FromStr;
|
||||
|
||||
use derive_more::{From, Into, IntoIterator};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumMessage, IntoEnumIterator};
|
||||
|
||||
use super::CapabilityKind;
|
||||
|
||||
/// Set of supported capabilities for a server
///
/// Wraps a [`HashSet`] of [`Capability`]; entries are deduplicated by their
/// `kind` (case-insensitive) per the custom `PartialEq`/`Hash` impls on
/// [`Capability`], not by their description.
#[derive(Clone, Debug, From, Into, PartialEq, Eq, IntoIterator, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(transparent)]
pub struct Capabilities(#[into_iterator(owned, ref)] HashSet<Capability>);
|
||||
|
||||
impl Capabilities {
|
||||
/// Return set of capabilities encompassing all possible capabilities
|
||||
pub fn all() -> Self {
|
||||
Self(CapabilityKind::iter().map(Capability::from).collect())
|
||||
}
|
||||
|
||||
/// Return empty set of capabilities
|
||||
pub fn none() -> Self {
|
||||
Self(HashSet::new())
|
||||
}
|
||||
|
||||
/// Returns true if the capability with described kind is included
|
||||
pub fn contains(&self, kind: impl AsRef<str>) -> bool {
|
||||
let cap = Capability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.contains(&cap)
|
||||
}
|
||||
|
||||
/// Adds the specified capability to the set of capabilities
|
||||
///
|
||||
/// * If the set did not have this capability, returns `true`
|
||||
/// * If the set did have this capability, returns `false`
|
||||
pub fn insert(&mut self, cap: impl Into<Capability>) -> bool {
|
||||
self.0.insert(cap.into())
|
||||
}
|
||||
|
||||
/// Removes the capability with the described kind, returning the capability
|
||||
pub fn take(&mut self, kind: impl AsRef<str>) -> Option<Capability> {
|
||||
let cap = Capability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.take(&cap)
|
||||
}
|
||||
|
||||
/// Removes the capability with the described kind, returning true if it existed
|
||||
pub fn remove(&mut self, kind: impl AsRef<str>) -> bool {
|
||||
let cap = Capability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.remove(&cap)
|
||||
}
|
||||
|
||||
/// Converts into vec of capabilities sorted by kind
|
||||
pub fn into_sorted_vec(self) -> Vec<Capability> {
|
||||
let mut this = self.0.into_iter().collect::<Vec<_>>();
|
||||
|
||||
this.sort_unstable();
|
||||
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl Capabilities {
    /// Produces the JSON schema describing [`Capabilities`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Capabilities)
    }
}
|
||||
|
||||
impl BitAnd for &Capabilities {
|
||||
type Output = Capabilities;
|
||||
|
||||
fn bitand(self, rhs: Self) -> Self::Output {
|
||||
Capabilities(self.0.bitand(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr for &Capabilities {
|
||||
type Output = Capabilities;
|
||||
|
||||
fn bitor(self, rhs: Self) -> Self::Output {
|
||||
Capabilities(self.0.bitor(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<Capability> for &Capabilities {
|
||||
type Output = Capabilities;
|
||||
|
||||
fn bitor(self, rhs: Capability) -> Self::Output {
|
||||
let mut other = Capabilities::none();
|
||||
other.0.insert(rhs);
|
||||
|
||||
self.bitor(&other)
|
||||
}
|
||||
}
|
||||
|
||||
impl BitXor for &Capabilities {
|
||||
type Output = Capabilities;
|
||||
|
||||
fn bitxor(self, rhs: Self) -> Self::Output {
|
||||
Capabilities(self.0.bitxor(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<Capability> for Capabilities {
|
||||
fn from_iter<I: IntoIterator<Item = Capability>>(iter: I) -> Self {
|
||||
let mut this = Capabilities::none();
|
||||
|
||||
for capability in iter {
|
||||
this.0.insert(capability);
|
||||
}
|
||||
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
/// Capability tied to a server. A capability is equivalent based on its kind and not description.
///
/// Kind comparison is ASCII-case-insensitive (see the `PartialEq`, `Ord`, and
/// `Hash` impls below).
#[derive(Clone, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Capability {
    /// Label describing the kind of capability
    pub kind: String,

    /// Information about the capability
    pub description: String,
}
|
||||
|
||||
impl Capability {
|
||||
/// Will convert the [`Capability`]'s `kind` into a known [`CapabilityKind`] if possible,
|
||||
/// returning None if the capability is unknown
|
||||
pub fn to_capability_kind(&self) -> Option<CapabilityKind> {
|
||||
CapabilityKind::from_str(&self.kind).ok()
|
||||
}
|
||||
|
||||
/// Returns true if the described capability is unknown
|
||||
pub fn is_unknown(&self) -> bool {
|
||||
self.to_capability_kind().is_none()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Capability {
    /// Capabilities are equal when their kinds match ignoring ASCII case;
    /// the description is deliberately excluded from the comparison.
    fn eq(&self, other: &Self) -> bool {
        self.kind.eq_ignore_ascii_case(&other.kind)
    }
}

impl Eq for Capability {}
|
||||
|
||||
impl PartialOrd for Capability {
    /// Delegates to [`Ord`] so the two orderings always agree.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
|
||||
|
||||
impl Ord for Capability {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.kind
|
||||
.to_ascii_lowercase()
|
||||
.cmp(&other.kind.to_ascii_lowercase())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Capability {
    /// Hashes only the lowercased `kind` so hashing stays consistent with the
    /// case-insensitive `PartialEq` impl; description is excluded.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.kind.to_ascii_lowercase().hash(state);
    }
}
|
||||
|
||||
impl From<CapabilityKind> for Capability {
    /// Creates a new capability using the kind's default message
    /// (via strum's `EnumMessage`) as the description, or an empty
    /// string when the kind has no message.
    fn from(kind: CapabilityKind) -> Self {
        let description = kind
            .get_message()
            .map(ToString::to_string)
            .unwrap_or_default();

        Self {
            kind: kind.to_string(),
            description,
        }
    }
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl Capability {
    /// Produces the JSON schema describing [`Capability`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Capability)
    }
}

#[cfg(feature = "schemars")]
impl CapabilityKind {
    /// Produces the JSON schema describing [`CapabilityKind`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(CapabilityKind)
    }
}
|
@ -0,0 +1,516 @@
|
||||
use std::collections::HashSet;
|
||||
use std::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter::FromIterator;
|
||||
use std::ops::{BitOr, Sub};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use derive_more::{Deref, DerefMut, IntoIterator};
|
||||
use notify::event::Event as NotifyEvent;
|
||||
use notify::EventKind as NotifyEventKind;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumString, EnumVariantNames, VariantNames};
|
||||
|
||||
/// Change to one or more paths on the filesystem
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Change {
    /// Label describing the kind of change
    pub kind: ChangeKind,

    /// Paths that were changed
    ///
    /// Interpretation depends on `kind`; e.g. [`ChangeKind::RenameBoth`]
    /// carries the source and target paths in that order.
    pub paths: Vec<PathBuf>,
}

#[cfg(feature = "schemars")]
impl Change {
    /// Produces the JSON schema describing [`Change`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Change)
    }
}
|
||||
|
||||
impl From<NotifyEvent> for Change {
|
||||
fn from(x: NotifyEvent) -> Self {
|
||||
Self {
|
||||
kind: x.kind.into(),
|
||||
paths: x.paths,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Label describing the kind of filesystem change detected for a watched path
///
/// Serialized and displayed in snake_case (e.g. `access_close_write`) and
/// parseable back via [`FromStr`] thanks to the strum `EnumString` derive.
#[derive(
    Copy,
    Clone,
    Debug,
    strum::Display,
    EnumString,
    EnumVariantNames,
    Hash,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
#[strum(serialize_all = "snake_case")]
pub enum ChangeKind {
    /// Something about a file or directory was accessed, but
    /// no specific details were known
    Access,

    /// A file was closed for executing
    AccessCloseExecute,

    /// A file was closed for reading
    AccessCloseRead,

    /// A file was closed for writing
    AccessCloseWrite,

    /// A file was opened for executing
    AccessOpenExecute,

    /// A file was opened for reading
    AccessOpenRead,

    /// A file was opened for writing
    AccessOpenWrite,

    /// A file or directory was read
    AccessRead,

    /// The access time of a file or directory was changed
    AccessTime,

    /// A file, directory, or something else was created
    Create,

    /// The content of a file or directory changed
    Content,

    /// The data of a file or directory was modified, but
    /// no specific details were known
    Data,

    /// The metadata of a file or directory was modified, but
    /// no specific details were known
    Metadata,

    /// Something about a file or directory was modified, but
    /// no specific details were known
    Modify,

    /// A file, directory, or something else was removed
    Remove,

    /// A file or directory was renamed, but no specific details were known
    Rename,

    /// A file or directory was renamed, and the provided paths
    /// are the source and target in that order (from, to)
    RenameBoth,

    /// A file or directory was renamed, and the provided path
    /// is the origin of the rename (before being renamed)
    RenameFrom,

    /// A file or directory was renamed, and the provided path
    /// is the result of the rename
    RenameTo,

    /// A file's size changed
    Size,

    /// The ownership of a file or directory was changed
    Ownership,

    /// The permissions of a file or directory was changed
    Permissions,

    /// The write or modify time of a file or directory was changed
    WriteTime,

    /// Catchall in case we have no insight as to the type of change
    Unknown,
}
|
||||
|
||||
impl ChangeKind {
    /// Returns a list of all variants as str names
    /// (snake_case, from the strum `EnumVariantNames` derive)
    pub const fn variants() -> &'static [&'static str] {
        Self::VARIANTS
    }

    /// Returns a list of all variants as a vec, sorted by the enum's
    /// derived `Ord` (declaration order)
    pub fn all() -> Vec<ChangeKind> {
        ChangeKindSet::all().into_sorted_vec()
    }

    /// Returns true if the change is a kind of access
    /// (any open, close, generic access, or read access)
    pub fn is_access_kind(&self) -> bool {
        self.is_open_access_kind()
            || self.is_close_access_kind()
            || matches!(self, Self::Access | Self::AccessRead)
    }

    /// Returns true if the change is a kind of open access
    pub fn is_open_access_kind(&self) -> bool {
        matches!(
            self,
            Self::AccessOpenExecute | Self::AccessOpenRead | Self::AccessOpenWrite
        )
    }

    /// Returns true if the change is a kind of close access
    pub fn is_close_access_kind(&self) -> bool {
        matches!(
            self,
            Self::AccessCloseExecute | Self::AccessCloseRead | Self::AccessCloseWrite
        )
    }

    /// Returns true if the change is a kind of creation
    pub fn is_create_kind(&self) -> bool {
        matches!(self, Self::Create)
    }

    /// Returns true if the change is a kind of modification
    /// (data change, metadata change, or generic modify)
    pub fn is_modify_kind(&self) -> bool {
        self.is_data_modify_kind() || self.is_metadata_modify_kind() || matches!(self, Self::Modify)
    }

    /// Returns true if the change is a kind of data modification
    pub fn is_data_modify_kind(&self) -> bool {
        matches!(self, Self::Content | Self::Data | Self::Size)
    }

    /// Returns true if the change is a kind of metadata modification
    pub fn is_metadata_modify_kind(&self) -> bool {
        matches!(
            self,
            Self::AccessTime
                | Self::Metadata
                | Self::Ownership
                | Self::Permissions
                | Self::WriteTime
        )
    }

    /// Returns true if the change is a kind of rename
    pub fn is_rename_kind(&self) -> bool {
        matches!(
            self,
            Self::Rename | Self::RenameBoth | Self::RenameFrom | Self::RenameTo
        )
    }

    /// Returns true if the change is a kind of removal
    pub fn is_remove_kind(&self) -> bool {
        matches!(self, Self::Remove)
    }

    /// Returns true if the change kind is unknown
    pub fn is_unknown_kind(&self) -> bool {
        matches!(self, Self::Unknown)
    }
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl ChangeKind {
    /// Produces the JSON schema describing [`ChangeKind`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(ChangeKind)
    }
}
|
||||
|
||||
impl BitOr for ChangeKind {
|
||||
type Output = ChangeKindSet;
|
||||
|
||||
fn bitor(self, rhs: Self) -> Self::Output {
|
||||
let mut set = ChangeKindSet::empty();
|
||||
set.insert(self);
|
||||
set.insert(rhs);
|
||||
set
|
||||
}
|
||||
}
|
||||
|
||||
impl From<NotifyEventKind> for ChangeKind {
    /// Maps a `notify` crate event kind onto the protocol's [`ChangeKind`],
    /// falling back to the coarser variant (`Access`, `Rename`, `Data`,
    /// `Metadata`, `Modify`, `Unknown`) whenever the notify kind carries no
    /// finer detail.
    fn from(x: NotifyEventKind) -> Self {
        use notify::event::{
            AccessKind, AccessMode, DataChange, MetadataKind, ModifyKind, RenameMode,
        };
        match x {
            // File/directory access events
            NotifyEventKind::Access(AccessKind::Read) => Self::AccessRead,
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Execute)) => {
                Self::AccessOpenExecute
            }
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Read)) => Self::AccessOpenRead,
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Write)) => Self::AccessOpenWrite,
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Execute)) => {
                Self::AccessCloseExecute
            }
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Read)) => Self::AccessCloseRead,
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Write)) => Self::AccessCloseWrite,
            NotifyEventKind::Access(_) => Self::Access,

            // File/directory creation events
            NotifyEventKind::Create(_) => Self::Create,

            // Rename-oriented events
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::Both)) => Self::RenameBoth,
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::From)) => Self::RenameFrom,
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::To)) => Self::RenameTo,
            NotifyEventKind::Modify(ModifyKind::Name(_)) => Self::Rename,

            // Data-modification events
            NotifyEventKind::Modify(ModifyKind::Data(DataChange::Content)) => Self::Content,
            NotifyEventKind::Modify(ModifyKind::Data(DataChange::Size)) => Self::Size,
            NotifyEventKind::Modify(ModifyKind::Data(_)) => Self::Data,

            // Metadata-modification events
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::AccessTime)) => {
                Self::AccessTime
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::WriteTime)) => {
                Self::WriteTime
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::Permissions)) => {
                Self::Permissions
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::Ownership)) => {
                Self::Ownership
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(_)) => Self::Metadata,

            // General modification events
            NotifyEventKind::Modify(_) => Self::Modify,

            // File/directory removal events
            NotifyEventKind::Remove(_) => Self::Remove,

            // Catch-all for other events
            NotifyEventKind::Any | NotifyEventKind::Other => Self::Unknown,
        }
    }
}
|
||||
|
||||
/// Represents a distinct set of different change kinds
///
/// Dereferences to the underlying [`HashSet`] of [`ChangeKind`] via the
/// derive_more `Deref`/`DerefMut` derives.
#[derive(Clone, Debug, Deref, DerefMut, IntoIterator, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ChangeKindSet(HashSet<ChangeKind>);
|
||||
|
||||
impl ChangeKindSet {
|
||||
/// Produces an empty set of [`ChangeKind`]
|
||||
pub fn empty() -> Self {
|
||||
Self(HashSet::new())
|
||||
}
|
||||
|
||||
/// Produces a set of all [`ChangeKind`]
|
||||
pub fn all() -> Self {
|
||||
vec![
|
||||
ChangeKind::Access,
|
||||
ChangeKind::AccessCloseExecute,
|
||||
ChangeKind::AccessCloseRead,
|
||||
ChangeKind::AccessCloseWrite,
|
||||
ChangeKind::AccessOpenExecute,
|
||||
ChangeKind::AccessOpenRead,
|
||||
ChangeKind::AccessOpenWrite,
|
||||
ChangeKind::AccessRead,
|
||||
ChangeKind::AccessTime,
|
||||
ChangeKind::Create,
|
||||
ChangeKind::Content,
|
||||
ChangeKind::Data,
|
||||
ChangeKind::Metadata,
|
||||
ChangeKind::Modify,
|
||||
ChangeKind::Remove,
|
||||
ChangeKind::Rename,
|
||||
ChangeKind::RenameBoth,
|
||||
ChangeKind::RenameFrom,
|
||||
ChangeKind::RenameTo,
|
||||
ChangeKind::Size,
|
||||
ChangeKind::Ownership,
|
||||
ChangeKind::Permissions,
|
||||
ChangeKind::WriteTime,
|
||||
ChangeKind::Unknown,
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the access kinds
|
||||
pub fn access_set() -> Self {
|
||||
Self::access_open_set()
|
||||
| Self::access_close_set()
|
||||
| ChangeKind::AccessRead
|
||||
| ChangeKind::Access
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the open access kinds
|
||||
pub fn access_open_set() -> Self {
|
||||
ChangeKind::AccessOpenExecute | ChangeKind::AccessOpenRead | ChangeKind::AccessOpenWrite
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the close access kinds
|
||||
pub fn access_close_set() -> Self {
|
||||
ChangeKind::AccessCloseExecute | ChangeKind::AccessCloseRead | ChangeKind::AccessCloseWrite
|
||||
}
|
||||
|
||||
// Produces a changeset containing all of the modification kinds
|
||||
pub fn modify_set() -> Self {
|
||||
Self::modify_data_set() | Self::modify_metadata_set() | ChangeKind::Modify
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the data modification kinds
|
||||
pub fn modify_data_set() -> Self {
|
||||
ChangeKind::Content | ChangeKind::Data | ChangeKind::Size
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the metadata modification kinds
|
||||
pub fn modify_metadata_set() -> Self {
|
||||
ChangeKind::AccessTime
|
||||
| ChangeKind::Metadata
|
||||
| ChangeKind::Ownership
|
||||
| ChangeKind::Permissions
|
||||
| ChangeKind::WriteTime
|
||||
}
|
||||
|
||||
/// Produces a changeset containing all of the rename kinds
|
||||
pub fn rename_set() -> Self {
|
||||
ChangeKind::Rename | ChangeKind::RenameBoth | ChangeKind::RenameFrom | ChangeKind::RenameTo
|
||||
}
|
||||
|
||||
/// Consumes set and returns a sorted vec of the kinds of changes
|
||||
pub fn into_sorted_vec(self) -> Vec<ChangeKind> {
|
||||
let mut v = self.0.into_iter().collect::<Vec<_>>();
|
||||
v.sort();
|
||||
v
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl ChangeKindSet {
    /// Produces the JSON schema describing [`ChangeKindSet`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(ChangeKindSet)
    }
}
|
||||
|
||||
impl fmt::Display for ChangeKindSet {
|
||||
/// Outputs a comma-separated series of [`ChangeKind`] as string that are sorted
|
||||
/// such that this will always be consistent output
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut kinds = self
|
||||
.0
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<String>>();
|
||||
kinds.sort_unstable();
|
||||
write!(f, "{}", kinds.join(","))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for ChangeKindSet {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.to_string() == other.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for ChangeKindSet {}
|
||||
|
||||
impl Hash for ChangeKindSet {
    /// Hashes based on the output of [`fmt::Display`]
    ///
    /// The display form is sorted, making the hash independent of `HashSet`
    /// iteration order and therefore consistent with `PartialEq`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.to_string().hash(state);
    }
}
|
||||
|
||||
impl BitOr<ChangeKindSet> for ChangeKindSet {
|
||||
type Output = Self;
|
||||
|
||||
fn bitor(mut self, rhs: ChangeKindSet) -> Self::Output {
|
||||
self.extend(rhs.0);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<ChangeKind> for ChangeKindSet {
|
||||
type Output = Self;
|
||||
|
||||
fn bitor(mut self, rhs: ChangeKind) -> Self::Output {
|
||||
self.0.insert(rhs);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<ChangeKindSet> for ChangeKind {
|
||||
type Output = ChangeKindSet;
|
||||
|
||||
fn bitor(self, rhs: ChangeKindSet) -> Self::Output {
|
||||
rhs | self
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<ChangeKindSet> for ChangeKindSet {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
ChangeKindSet(&self.0 - &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<&'_ ChangeKindSet> for &ChangeKindSet {
|
||||
type Output = ChangeKindSet;
|
||||
|
||||
fn sub(self, other: &ChangeKindSet) -> Self::Output {
|
||||
ChangeKindSet(&self.0 - &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ChangeKindSet {
|
||||
type Err = strum::ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut change_set = HashSet::new();
|
||||
|
||||
for word in s.split(',') {
|
||||
change_set.insert(ChangeKind::from_str(word.trim())?);
|
||||
}
|
||||
|
||||
Ok(ChangeKindSet(change_set))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<ChangeKind> for ChangeKindSet {
|
||||
fn from_iter<I: IntoIterator<Item = ChangeKind>>(iter: I) -> Self {
|
||||
let mut change_set = HashSet::new();
|
||||
|
||||
for i in iter {
|
||||
change_set.insert(i);
|
||||
}
|
||||
|
||||
ChangeKindSet(change_set)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ChangeKind> for ChangeKindSet {
|
||||
fn from(change_kind: ChangeKind) -> Self {
|
||||
let mut set = Self::empty();
|
||||
set.insert(change_kind);
|
||||
set
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<ChangeKind>> for ChangeKindSet {
|
||||
fn from(changes: Vec<ChangeKind>) -> Self {
|
||||
changes.into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ChangeKindSet {
|
||||
fn default() -> Self {
|
||||
Self::empty()
|
||||
}
|
||||
}
|
@ -0,0 +1,53 @@
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use derive_more::{Display, From, Into};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents some command with arguments to execute
///
/// Stored as a single string; [`Cmd::program`] and [`Cmd::arguments`] split it
/// on the first space.
#[derive(Clone, Debug, Display, From, Into, Hash, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Cmd(String);
|
||||
|
||||
impl Cmd {
|
||||
/// Creates a new command from the given `cmd`
|
||||
pub fn new(cmd: impl Into<String>) -> Self {
|
||||
Self(cmd.into())
|
||||
}
|
||||
|
||||
/// Returns reference to the program portion of the command
|
||||
pub fn program(&self) -> &str {
|
||||
match self.0.split_once(' ') {
|
||||
Some((program, _)) => program.trim(),
|
||||
None => self.0.trim(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns reference to the arguments portion of the command
|
||||
pub fn arguments(&self) -> &str {
|
||||
match self.0.split_once(' ') {
|
||||
Some((_, arguments)) => arguments.trim(),
|
||||
None => "",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl Cmd {
    /// Produces the JSON schema describing [`Cmd`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Cmd)
    }
}

impl Deref for Cmd {
    type Target = String;

    /// Exposes the underlying command string immutably.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Cmd {
    /// Exposes the underlying command string mutably.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
@ -0,0 +1,59 @@
|
||||
use std::fs::FileType as StdFileType;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use derive_more::IsVariant;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::AsRefStr;
|
||||
|
||||
/// Represents information about a single entry within a directory
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct DirEntry {
    /// Represents the full path to the entry
    pub path: PathBuf,

    /// Represents the type of the entry as a file/dir/symlink
    pub file_type: FileType,

    /// Depth at which this entry was created relative to the root (0 being immediately within
    /// root)
    pub depth: usize,
}

#[cfg(feature = "schemars")]
impl DirEntry {
    /// Produces the JSON schema describing [`DirEntry`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(DirEntry)
    }
}

/// Represents the type associated with a dir entry
///
/// Serialized and stringified (via strum `AsRefStr`) in snake_case.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, AsRefStr, IsVariant, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
#[strum(serialize_all = "snake_case")]
pub enum FileType {
    Dir,
    File,
    Symlink,
}
|
||||
|
||||
impl From<StdFileType> for FileType {
|
||||
fn from(ft: StdFileType) -> Self {
|
||||
if ft.is_dir() {
|
||||
Self::Dir
|
||||
} else if ft.is_symlink() {
|
||||
Self::Symlink
|
||||
} else {
|
||||
Self::File
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl FileType {
    /// Produces the JSON schema describing [`FileType`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(FileType)
    }
}
|
@ -0,0 +1,404 @@
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{deserialize_u128_option, serialize_u128_option, FileType};
|
||||
|
||||
/// Represents metadata about some path on a remote machine
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Metadata {
    /// Canonicalized path to the file or directory, resolving symlinks, only included
    /// if flagged during the request
    pub canonicalized_path: Option<PathBuf>,

    /// Represents the type of the entry as a file/dir/symlink
    pub file_type: FileType,

    /// Size of the file/directory/symlink in bytes
    pub len: u64,

    /// Whether or not the file/directory/symlink is marked as unwriteable
    pub readonly: bool,

    /// Represents the last time (in milliseconds since the Unix epoch) when the
    /// file/directory/symlink was accessed;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub accessed: Option<u128>,

    /// Represents when (in milliseconds since the Unix epoch) the
    /// file/directory/symlink was created;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub created: Option<u128>,

    /// Represents the last time (in milliseconds since the Unix epoch) when the
    /// file/directory/symlink was modified;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub modified: Option<u128>,

    /// Represents metadata that is specific to a unix remote machine
    pub unix: Option<UnixMetadata>,

    /// Represents metadata that is specific to a windows remote machine
    pub windows: Option<WindowsMetadata>,
}

impl Metadata {
    /// Reads metadata for `path` from the local filesystem.
    ///
    /// * `canonicalize` - when true, also resolves and records the canonical path
    /// * `resolve_file_type` - when true and the path is a symlink, reports the
    ///   file type of the link's target instead of the link itself
    ///
    /// Timestamps are reported as milliseconds since the Unix epoch; any
    /// timestamp the platform cannot provide (or that precedes the epoch)
    /// becomes `None`.
    ///
    /// # Errors
    ///
    /// Returns any I/O error produced while reading the (symlink) metadata,
    /// canonicalizing, or resolving the link target.
    pub async fn read(
        path: impl AsRef<Path>,
        canonicalize: bool,
        resolve_file_type: bool,
    ) -> io::Result<Self> {
        // symlink_metadata: does NOT follow symlinks, so a symlink reports
        // itself unless resolve_file_type is requested below
        let metadata = tokio::fs::symlink_metadata(path.as_ref()).await?;
        let canonicalized_path = if canonicalize {
            Some(tokio::fs::canonicalize(path.as_ref()).await?)
        } else {
            None
        };

        // If asking for resolved file type and current type is symlink, then we want to refresh
        // our metadata to get the filetype for the resolved link
        let file_type = if resolve_file_type && metadata.file_type().is_symlink() {
            tokio::fs::metadata(path).await?.file_type()
        } else {
            metadata.file_type()
        };

        Ok(Self {
            canonicalized_path,
            accessed: metadata
                .accessed()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            created: metadata
                .created()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            modified: metadata
                .modified()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            len: metadata.len(),
            readonly: metadata.permissions().readonly(),
            file_type: if file_type.is_dir() {
                FileType::Dir
            } else if file_type.is_file() {
                FileType::File
            } else {
                FileType::Symlink
            },

            // Platform-specific extras: exactly one of the cfg pairs below is
            // compiled in for each of unix/windows
            #[cfg(unix)]
            unix: Some({
                use std::os::unix::prelude::*;
                let mode = metadata.mode();
                crate::protocol::UnixMetadata::from(mode)
            }),
            #[cfg(not(unix))]
            unix: None,

            #[cfg(windows)]
            windows: Some({
                use std::os::windows::prelude::*;
                let attributes = metadata.file_attributes();
                crate::protocol::WindowsMetadata::from(attributes)
            }),
            #[cfg(not(windows))]
            windows: None,
        })
    }
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl Metadata {
    /// Returns the JSON schema describing [`Metadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Metadata)
    }
}
|
||||
|
||||
/// Represents unix-specific metadata about some path on a remote machine
///
/// Each field mirrors one permission bit of the file's unix mode; see the
/// `From<u32>` conversions for the exact bit mapping.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct UnixMetadata {
    /// Represents whether or not owner can read from the file
    pub owner_read: bool,

    /// Represents whether or not owner can write to the file
    pub owner_write: bool,

    /// Represents whether or not owner can execute the file
    pub owner_exec: bool,

    /// Represents whether or not associated group can read from the file
    pub group_read: bool,

    /// Represents whether or not associated group can write to the file
    pub group_write: bool,

    /// Represents whether or not associated group can execute the file
    pub group_exec: bool,

    /// Represents whether or not other can read from the file
    pub other_read: bool,

    /// Represents whether or not other can write to the file
    pub other_write: bool,

    /// Represents whether or not other can execute the file
    pub other_exec: bool,
}

#[cfg(feature = "schemars")]
impl UnixMetadata {
    /// Returns the JSON schema describing [`UnixMetadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(UnixMetadata)
    }
}
|
||||
|
||||
impl From<u32> for UnixMetadata {
|
||||
/// Create from a unix mode bitset
|
||||
fn from(mode: u32) -> Self {
|
||||
let flags = UnixFilePermissionFlags::from_bits_truncate(mode);
|
||||
Self {
|
||||
owner_read: flags.contains(UnixFilePermissionFlags::OWNER_READ),
|
||||
owner_write: flags.contains(UnixFilePermissionFlags::OWNER_WRITE),
|
||||
owner_exec: flags.contains(UnixFilePermissionFlags::OWNER_EXEC),
|
||||
group_read: flags.contains(UnixFilePermissionFlags::GROUP_READ),
|
||||
group_write: flags.contains(UnixFilePermissionFlags::GROUP_WRITE),
|
||||
group_exec: flags.contains(UnixFilePermissionFlags::GROUP_EXEC),
|
||||
other_read: flags.contains(UnixFilePermissionFlags::OTHER_READ),
|
||||
other_write: flags.contains(UnixFilePermissionFlags::OTHER_WRITE),
|
||||
other_exec: flags.contains(UnixFilePermissionFlags::OTHER_EXEC),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<UnixMetadata> for u32 {
|
||||
/// Convert to a unix mode bitset
|
||||
fn from(metadata: UnixMetadata) -> Self {
|
||||
let mut flags = UnixFilePermissionFlags::empty();
|
||||
|
||||
if metadata.owner_read {
|
||||
flags.insert(UnixFilePermissionFlags::OWNER_READ);
|
||||
}
|
||||
if metadata.owner_write {
|
||||
flags.insert(UnixFilePermissionFlags::OWNER_WRITE);
|
||||
}
|
||||
if metadata.owner_exec {
|
||||
flags.insert(UnixFilePermissionFlags::OWNER_EXEC);
|
||||
}
|
||||
|
||||
if metadata.group_read {
|
||||
flags.insert(UnixFilePermissionFlags::GROUP_READ);
|
||||
}
|
||||
if metadata.group_write {
|
||||
flags.insert(UnixFilePermissionFlags::GROUP_WRITE);
|
||||
}
|
||||
if metadata.group_exec {
|
||||
flags.insert(UnixFilePermissionFlags::GROUP_EXEC);
|
||||
}
|
||||
|
||||
if metadata.other_read {
|
||||
flags.insert(UnixFilePermissionFlags::OTHER_READ);
|
||||
}
|
||||
if metadata.other_write {
|
||||
flags.insert(UnixFilePermissionFlags::OTHER_WRITE);
|
||||
}
|
||||
if metadata.other_exec {
|
||||
flags.insert(UnixFilePermissionFlags::OTHER_EXEC);
|
||||
}
|
||||
|
||||
flags.bits()
|
||||
}
|
||||
}
|
||||
|
||||
impl UnixMetadata {
|
||||
pub fn is_readonly(self) -> bool {
|
||||
!(self.owner_read || self.group_read || self.other_read)
|
||||
}
|
||||
}
|
||||
|
||||
bitflags! {
    /// Standard unix permission bits (the lower nine bits of a file mode),
    /// expressed in octal as rwx triplets for owner, group, and other
    struct UnixFilePermissionFlags: u32 {
        const OWNER_READ = 0o400;
        const OWNER_WRITE = 0o200;
        const OWNER_EXEC = 0o100;
        const GROUP_READ = 0o40;
        const GROUP_WRITE = 0o20;
        const GROUP_EXEC = 0o10;
        const OTHER_READ = 0o4;
        const OTHER_WRITE = 0o2;
        const OTHER_EXEC = 0o1;
    }
}
|
||||
|
||||
/// Represents windows-specific metadata about some path on a remote machine
///
/// Each field corresponds to one windows file attribute flag; see the
/// `From<u32>` conversions for the exact bit mapping.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct WindowsMetadata {
    /// Represents whether or not a file or directory is an archive
    pub archive: bool,

    /// Represents whether or not a file or directory is compressed
    pub compressed: bool,

    /// Represents whether or not the file or directory is encrypted
    pub encrypted: bool,

    /// Represents whether or not a file or directory is hidden
    pub hidden: bool,

    /// Represents whether or not a directory or user data stream is configured with integrity
    pub integrity_stream: bool,

    /// Represents whether or not a file does not have other attributes set
    pub normal: bool,

    /// Represents whether or not a file or directory is not to be indexed by content indexing
    /// service
    pub not_content_indexed: bool,

    /// Represents whether or not a user data stream is not to be read by the background data
    /// integrity scanner
    pub no_scrub_data: bool,

    /// Represents whether or not the data of a file is not available immediately
    pub offline: bool,

    /// Represents whether or not a file or directory is not fully present locally
    pub recall_on_data_access: bool,

    /// Represents whether or not a file or directory has no physical representation on the local
    /// system (is virtual)
    pub recall_on_open: bool,

    /// Represents whether or not a file or directory has an associated reparse point, or a file is
    /// a symbolic link
    pub reparse_point: bool,

    /// Represents whether or not a file is a sparse file
    pub sparse_file: bool,

    /// Represents whether or not a file or directory is used partially or exclusively by the
    /// operating system
    pub system: bool,

    /// Represents whether or not a file is being used for temporary storage
    pub temporary: bool,
}

#[cfg(feature = "schemars")]
impl WindowsMetadata {
    /// Returns the JSON schema describing [`WindowsMetadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(WindowsMetadata)
    }
}
|
||||
|
||||
impl From<u32> for WindowsMetadata {
|
||||
/// Create from a windows file attribute bitset
|
||||
fn from(file_attributes: u32) -> Self {
|
||||
let flags = WindowsFileAttributeFlags::from_bits_truncate(file_attributes);
|
||||
Self {
|
||||
archive: flags.contains(WindowsFileAttributeFlags::ARCHIVE),
|
||||
compressed: flags.contains(WindowsFileAttributeFlags::COMPRESSED),
|
||||
encrypted: flags.contains(WindowsFileAttributeFlags::ENCRYPTED),
|
||||
hidden: flags.contains(WindowsFileAttributeFlags::HIDDEN),
|
||||
integrity_stream: flags.contains(WindowsFileAttributeFlags::INTEGRITY_SYSTEM),
|
||||
normal: flags.contains(WindowsFileAttributeFlags::NORMAL),
|
||||
not_content_indexed: flags.contains(WindowsFileAttributeFlags::NOT_CONTENT_INDEXED),
|
||||
no_scrub_data: flags.contains(WindowsFileAttributeFlags::NO_SCRUB_DATA),
|
||||
offline: flags.contains(WindowsFileAttributeFlags::OFFLINE),
|
||||
recall_on_data_access: flags.contains(WindowsFileAttributeFlags::RECALL_ON_DATA_ACCESS),
|
||||
recall_on_open: flags.contains(WindowsFileAttributeFlags::RECALL_ON_OPEN),
|
||||
reparse_point: flags.contains(WindowsFileAttributeFlags::REPARSE_POINT),
|
||||
sparse_file: flags.contains(WindowsFileAttributeFlags::SPARSE_FILE),
|
||||
system: flags.contains(WindowsFileAttributeFlags::SYSTEM),
|
||||
temporary: flags.contains(WindowsFileAttributeFlags::TEMPORARY),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WindowsMetadata> for u32 {
|
||||
/// Convert to a windows file attribute bitset
|
||||
fn from(metadata: WindowsMetadata) -> Self {
|
||||
let mut flags = WindowsFileAttributeFlags::empty();
|
||||
|
||||
if metadata.archive {
|
||||
flags.insert(WindowsFileAttributeFlags::ARCHIVE);
|
||||
}
|
||||
if metadata.compressed {
|
||||
flags.insert(WindowsFileAttributeFlags::COMPRESSED);
|
||||
}
|
||||
if metadata.encrypted {
|
||||
flags.insert(WindowsFileAttributeFlags::ENCRYPTED);
|
||||
}
|
||||
if metadata.hidden {
|
||||
flags.insert(WindowsFileAttributeFlags::HIDDEN);
|
||||
}
|
||||
if metadata.integrity_stream {
|
||||
flags.insert(WindowsFileAttributeFlags::INTEGRITY_SYSTEM);
|
||||
}
|
||||
if metadata.normal {
|
||||
flags.insert(WindowsFileAttributeFlags::NORMAL);
|
||||
}
|
||||
if metadata.not_content_indexed {
|
||||
flags.insert(WindowsFileAttributeFlags::NOT_CONTENT_INDEXED);
|
||||
}
|
||||
if metadata.no_scrub_data {
|
||||
flags.insert(WindowsFileAttributeFlags::NO_SCRUB_DATA);
|
||||
}
|
||||
if metadata.offline {
|
||||
flags.insert(WindowsFileAttributeFlags::OFFLINE);
|
||||
}
|
||||
if metadata.recall_on_data_access {
|
||||
flags.insert(WindowsFileAttributeFlags::RECALL_ON_DATA_ACCESS);
|
||||
}
|
||||
if metadata.recall_on_open {
|
||||
flags.insert(WindowsFileAttributeFlags::RECALL_ON_OPEN);
|
||||
}
|
||||
if metadata.reparse_point {
|
||||
flags.insert(WindowsFileAttributeFlags::REPARSE_POINT);
|
||||
}
|
||||
if metadata.sparse_file {
|
||||
flags.insert(WindowsFileAttributeFlags::SPARSE_FILE);
|
||||
}
|
||||
if metadata.system {
|
||||
flags.insert(WindowsFileAttributeFlags::SYSTEM);
|
||||
}
|
||||
if metadata.temporary {
|
||||
flags.insert(WindowsFileAttributeFlags::TEMPORARY);
|
||||
}
|
||||
|
||||
flags.bits()
|
||||
}
|
||||
}
|
||||
|
||||
bitflags! {
    /// File attribute bits; the names and values follow the Windows
    /// `FILE_ATTRIBUTE_*` constants
    // NOTE(review): VIRTUAL has no corresponding field on WindowsMetadata and
    // is never produced or consumed by the conversions above — confirm
    // whether it is intentionally reserved
    struct WindowsFileAttributeFlags: u32 {
        const ARCHIVE = 0x20;
        const COMPRESSED = 0x800;
        const ENCRYPTED = 0x4000;
        const HIDDEN = 0x2;
        const INTEGRITY_SYSTEM = 0x8000;
        const NORMAL = 0x80;
        const NOT_CONTENT_INDEXED = 0x2000;
        const NO_SCRUB_DATA = 0x20000;
        const OFFLINE = 0x1000;
        const RECALL_ON_DATA_ACCESS = 0x400000;
        const RECALL_ON_OPEN = 0x40000;
        const REPARSE_POINT = 0x400;
        const SPARSE_FILE = 0x200;
        const SYSTEM = 0x4;
        const TEMPORARY = 0x100;
        const VIRTUAL = 0x10000;
    }
}
|
@ -0,0 +1,140 @@
|
||||
use std::fmt;
|
||||
use std::num::ParseIntError;
|
||||
use std::str::FromStr;
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
use portable_pty::PtySize as PortablePtySize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents the size associated with a remote PTY
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct PtySize {
    /// Number of lines of text
    pub rows: u16,

    /// Number of columns of text
    pub cols: u16,

    /// Width of a cell in pixels. Note that some systems never fill this value and ignore it.
    /// Defaults to 0 when absent from serialized input.
    #[serde(default)]
    pub pixel_width: u16,

    /// Height of a cell in pixels. Note that some systems never fill this value and ignore it.
    /// Defaults to 0 when absent from serialized input.
    #[serde(default)]
    pub pixel_height: u16,
}
|
||||
|
||||
impl PtySize {
|
||||
/// Creates new size using just rows and columns
|
||||
pub fn from_rows_and_cols(rows: u16, cols: u16) -> Self {
|
||||
Self {
|
||||
rows,
|
||||
cols,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl PtySize {
    /// Returns the JSON schema describing [`PtySize`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(PtySize)
    }
}
|
||||
|
||||
impl From<PortablePtySize> for PtySize {
    /// Converts from the `portable_pty` size type, copying every field verbatim.
    fn from(size: PortablePtySize) -> Self {
        let (rows, cols) = (size.rows, size.cols);
        let (pixel_width, pixel_height) = (size.pixel_width, size.pixel_height);
        Self {
            rows,
            cols,
            pixel_width,
            pixel_height,
        }
    }
}
|
||||
|
||||
impl From<PtySize> for PortablePtySize {
    /// Converts into the `portable_pty` size type, copying every field verbatim.
    fn from(size: PtySize) -> Self {
        let (rows, cols) = (size.rows, size.cols);
        let (pixel_width, pixel_height) = (size.pixel_width, size.pixel_height);
        Self {
            rows,
            cols,
            pixel_width,
            pixel_height,
        }
    }
}
|
||||
|
||||
impl fmt::Display for PtySize {
|
||||
/// Prints out `rows,cols[,pixel_width,pixel_height]` where the
|
||||
/// pixel width and pixel height are only included if either
|
||||
/// one of them is not zero
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{},{}", self.rows, self.cols)?;
|
||||
if self.pixel_width > 0 || self.pixel_height > 0 {
|
||||
write!(f, ",{},{}", self.pixel_width, self.pixel_height)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PtySize {
|
||||
fn default() -> Self {
|
||||
PtySize {
|
||||
rows: 24,
|
||||
cols: 80,
|
||||
pixel_width: 0,
|
||||
pixel_height: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Error that can arise when parsing a [`PtySize`] from a string
#[derive(Clone, Debug, PartialEq, Eq, Display, Error)]
pub enum PtySizeParseError {
    /// No rows token was present in the input
    MissingRows,
    /// No columns token was present in the input
    MissingColumns,
    /// Rows token failed to parse as an integer
    InvalidRows(ParseIntError),
    /// Columns token failed to parse as an integer
    InvalidColumns(ParseIntError),
    /// Pixel width token failed to parse as an integer
    InvalidPixelWidth(ParseIntError),
    /// Pixel height token failed to parse as an integer
    InvalidPixelHeight(ParseIntError),
}
|
||||
|
||||
impl FromStr for PtySize {
|
||||
type Err = PtySizeParseError;
|
||||
|
||||
/// Attempts to parse a str into PtySize using one of the following formats:
|
||||
///
|
||||
/// * rows,cols (defaults to 0 for pixel_width & pixel_height)
|
||||
/// * rows,cols,pixel_width,pixel_height
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut tokens = s.split(',');
|
||||
|
||||
Ok(Self {
|
||||
rows: tokens
|
||||
.next()
|
||||
.ok_or(PtySizeParseError::MissingRows)?
|
||||
.trim()
|
||||
.parse()
|
||||
.map_err(PtySizeParseError::InvalidRows)?,
|
||||
cols: tokens
|
||||
.next()
|
||||
.ok_or(PtySizeParseError::MissingColumns)?
|
||||
.trim()
|
||||
.parse()
|
||||
.map_err(PtySizeParseError::InvalidColumns)?,
|
||||
pixel_width: tokens
|
||||
.next()
|
||||
.map(|s| s.trim().parse())
|
||||
.transpose()
|
||||
.map_err(PtySizeParseError::InvalidPixelWidth)?
|
||||
.unwrap_or(0),
|
||||
pixel_height: tokens
|
||||
.next()
|
||||
.map(|s| s.trim().parse())
|
||||
.transpose()
|
||||
.map_err(PtySizeParseError::InvalidPixelHeight)?
|
||||
.unwrap_or(0),
|
||||
})
|
||||
}
|
||||
}
|
@ -0,0 +1,425 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::FileType;
|
||||
|
||||
/// Id associated with a search
pub type SearchId = u32;

/// Represents a query to perform against the filesystem
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuery {
    /// Kind of data to examine using condition
    pub target: SearchQueryTarget,

    /// Condition to meet to be considered a match
    pub condition: SearchQueryCondition,

    /// Paths in which to perform the query
    pub paths: Vec<PathBuf>,

    /// Options to apply to the query (falls back to defaults when omitted
    /// from serialized input)
    #[serde(default)]
    pub options: SearchQueryOptions,
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl SearchQuery {
    /// Returns the JSON schema describing [`SearchQuery`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuery)
    }
}
|
||||
|
||||
impl FromStr for SearchQuery {
    type Err = serde_json::error::Error;

    /// Parses search query from a JSON string
    ///
    /// Fails when the input is not valid JSON matching the [`SearchQuery`]
    /// shape.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(s)
    }
}
|
||||
|
||||
/// Kind of data to examine using conditions
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case")]
pub enum SearchQueryTarget {
    /// Checks path of file, directory, or symlink
    Path,

    /// Checks contents of files
    Contents,
}

#[cfg(feature = "schemars")]
impl SearchQueryTarget {
    /// Returns the JSON schema describing [`SearchQueryTarget`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryTarget)
    }
}
|
||||
|
||||
/// Condition used to find a match in a search query
///
/// Serialized with an internal `type` tag, e.g.
/// `{"type": "contains", "value": "..."}`.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
pub enum SearchQueryCondition {
    /// Text is found anywhere (all regex patterns are escaped)
    Contains { value: String },

    /// Ends with some text (all regex patterns are escaped)
    EndsWith { value: String },

    /// Matches some text exactly (all regex patterns are escaped)
    Equals { value: String },

    /// Any of the conditions match
    Or { value: Vec<SearchQueryCondition> },

    /// Matches some regex
    Regex { value: String },

    /// Begins with some text (all regex patterns are escaped)
    StartsWith { value: String },
}
|
||||
|
||||
impl SearchQueryCondition {
|
||||
/// Creates a new instance with `Contains` variant
|
||||
pub fn contains(value: impl Into<String>) -> Self {
|
||||
Self::Contains {
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance with `EndsWith` variant
|
||||
pub fn ends_with(value: impl Into<String>) -> Self {
|
||||
Self::EndsWith {
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance with `Equals` variant
|
||||
pub fn equals(value: impl Into<String>) -> Self {
|
||||
Self::Equals {
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance with `Or` variant
|
||||
pub fn or<I, C>(value: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = C>,
|
||||
C: Into<SearchQueryCondition>,
|
||||
{
|
||||
Self::Or {
|
||||
value: value.into_iter().map(|s| s.into()).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance with `Regex` variant
|
||||
pub fn regex(value: impl Into<String>) -> Self {
|
||||
Self::Regex {
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance with `StartsWith` variant
|
||||
pub fn starts_with(value: impl Into<String>) -> Self {
|
||||
Self::StartsWith {
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the condition in a regex string
|
||||
pub fn to_regex_string(&self) -> String {
|
||||
match self {
|
||||
Self::Contains { value } => regex::escape(value),
|
||||
Self::EndsWith { value } => format!(r"{}$", regex::escape(value)),
|
||||
Self::Equals { value } => format!(r"^{}$", regex::escape(value)),
|
||||
Self::Regex { value } => value.to_string(),
|
||||
Self::StartsWith { value } => format!(r"^{}", regex::escape(value)),
|
||||
Self::Or { value } => {
|
||||
let mut s = String::new();
|
||||
for (i, condition) in value.iter().enumerate() {
|
||||
if i > 0 {
|
||||
s.push('|');
|
||||
}
|
||||
s.push_str(&condition.to_regex_string());
|
||||
}
|
||||
s
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl SearchQueryCondition {
    /// Returns the JSON schema describing [`SearchQueryCondition`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryCondition)
    }
}
|
||||
|
||||
impl FromStr for SearchQueryCondition {
|
||||
type Err = std::convert::Infallible;
|
||||
|
||||
/// Parses search query from a JSON string
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(Self::regex(s))
|
||||
}
|
||||
}
|
||||
|
||||
/// Options associated with a search query
///
/// Every field is optional during deserialization thanks to `#[serde(default)]`.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(default)]
pub struct SearchQueryOptions {
    /// Restrict search to only these file types (otherwise all are allowed).
    pub allowed_file_types: HashSet<FileType>,

    /// Regex to use to filter paths being searched to only those that match the include condition.
    pub include: Option<SearchQueryCondition>,

    /// Regex to use to filter paths being searched to only those that do not match the exclude
    /// condition.
    pub exclude: Option<SearchQueryCondition>,

    /// If true, will search upward through parent directories rather than the traditional downward
    /// search that recurses through all children directories.
    ///
    /// Note that this will use maximum depth to apply to the reverse direction, and will only look
    /// through each ancestor directory's immediate entries. In other words, this will not result
    /// in recursing through sibling directories.
    ///
    /// An upward search will ALWAYS search the contents of a directory, so this means providing a
    /// path to a directory will search its entries EVEN if the max_depth is 0.
    pub upward: bool,

    /// Search should follow symbolic links.
    pub follow_symbolic_links: bool,

    /// Maximum results to return before stopping the query.
    pub limit: Option<u64>,

    /// Maximum depth (directories) to search
    ///
    /// The smallest depth is 0 and always corresponds to the path given to the new function on
    /// this type. Its direct descendents have depth 1, and their descendents have depth 2, and so
    /// on.
    ///
    /// Note that this will not simply filter the entries of the iterator, but it will actually
    /// avoid descending into directories when the depth is exceeded.
    pub max_depth: Option<u64>,

    /// Amount of results to batch before sending back excluding final submission that will always
    /// include the remaining results even if less than pagination request.
    pub pagination: Option<u64>,
}

#[cfg(feature = "schemars")]
impl SearchQueryOptions {
    /// Returns the JSON schema describing [`SearchQueryOptions`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryOptions)
    }
}
|
||||
|
||||
/// Represents a match for a search query
///
/// Serialized with an internal `type` tag of either `path` or `contents`.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
pub enum SearchQueryMatch {
    /// Matches part of a file's path
    Path(SearchQueryPathMatch),

    /// Matches part of a file's contents
    Contents(SearchQueryContentsMatch),
}
|
||||
|
||||
impl SearchQueryMatch {
|
||||
pub fn into_path_match(self) -> Option<SearchQueryPathMatch> {
|
||||
match self {
|
||||
Self::Path(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_contents_match(self) -> Option<SearchQueryContentsMatch> {
|
||||
match self {
|
||||
Self::Contents(x) => Some(x),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl SearchQueryMatch {
    /// Returns the JSON schema describing [`SearchQueryMatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryMatch)
    }
}
|
||||
|
||||
/// Represents details for a match on a path
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryPathMatch {
    /// Path associated with the match
    pub path: PathBuf,

    /// Collection of matches tied to `path` where each submatch's byte offset is relative to
    /// `path`
    pub submatches: Vec<SearchQuerySubmatch>,
}

#[cfg(feature = "schemars")]
impl SearchQueryPathMatch {
    /// Returns the JSON schema describing [`SearchQueryPathMatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryPathMatch)
    }
}
|
||||
|
||||
/// Represents details for a match on a file's contents
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryContentsMatch {
    /// Path to file whose contents match
    pub path: PathBuf,

    /// Line(s) that matched
    pub lines: SearchQueryMatchData,

    /// Line number where match starts (base index 1)
    pub line_number: u64,

    /// Absolute byte offset corresponding to the start of `lines` in the data being searched
    pub absolute_offset: u64,

    /// Collection of matches tied to `lines` where each submatch's byte offset is relative to
    /// `lines` and not the overall content
    pub submatches: Vec<SearchQuerySubmatch>,
}

#[cfg(feature = "schemars")]
impl SearchQueryContentsMatch {
    /// Returns the JSON schema describing [`SearchQueryContentsMatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryContentsMatch)
    }
}
|
||||
|
||||
/// Represents a single submatch within a path or contents match
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuerySubmatch {
    /// Content matched by query
    pub r#match: SearchQueryMatchData,

    /// Byte offset representing start of submatch (inclusive)
    pub start: u64,

    /// Byte offset representing end of submatch (exclusive)
    pub end: u64,
}

#[cfg(feature = "schemars")]
impl SearchQuerySubmatch {
    /// Returns the JSON schema describing [`SearchQuerySubmatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuerySubmatch)
    }
}
|
||||
|
||||
/// Represents matched data, either as valid UTF-8 text or raw bytes
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(
    rename_all = "snake_case",
    deny_unknown_fields,
    tag = "type",
    content = "value"
)]
pub enum SearchQueryMatchData {
    /// Match represented as UTF-8 text
    Text(String),

    /// Match represented as bytes
    Bytes(Vec<u8>),
}
|
||||
|
||||
impl SearchQueryMatchData {
|
||||
/// Creates a new instance with `Text` variant
|
||||
pub fn text(value: impl Into<String>) -> Self {
|
||||
Self::Text(value.into())
|
||||
}
|
||||
|
||||
/// Creates a new instance with `Bytes` variant
|
||||
pub fn bytes(value: impl Into<Vec<u8>>) -> Self {
|
||||
Self::Bytes(value.into())
|
||||
}
|
||||
|
||||
/// Returns the UTF-8 str reference to the data, if is valid UTF-8
|
||||
pub fn to_str(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::Text(x) => Some(x),
|
||||
Self::Bytes(x) => std::str::from_utf8(x).ok(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts data to a UTF-8 string, replacing any invalid UTF-8 sequences with
|
||||
/// [`U+FFFD REPLACEMENT CHARACTER`](https://doc.rust-lang.org/nightly/core/char/const.REPLACEMENT_CHARACTER.html)
|
||||
pub fn to_string_lossy(&self) -> Cow<'_, str> {
|
||||
match self {
|
||||
Self::Text(x) => Cow::Borrowed(x),
|
||||
Self::Bytes(x) => String::from_utf8_lossy(x),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl SearchQueryMatchData {
    /// Returns the JSON schema describing [`SearchQueryMatchData`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryMatchData)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    mod search_query_condition {
        use test_log::test;

        use super::*;

        // Every constructor except `regex` must escape regex metacharacters
        // in its input; `regex` passes the pattern through untouched and
        // `or` joins its children with `|`
        #[test]
        fn to_regex_string_should_convert_to_appropriate_regex_and_escape_as_needed() {
            assert_eq!(
                SearchQueryCondition::contains("t^es$t").to_regex_string(),
                r"t\^es\$t"
            );
            assert_eq!(
                SearchQueryCondition::ends_with("t^es$t").to_regex_string(),
                r"t\^es\$t$"
            );
            assert_eq!(
                SearchQueryCondition::equals("t^es$t").to_regex_string(),
                r"^t\^es\$t$"
            );
            assert_eq!(
                SearchQueryCondition::or([
                    SearchQueryCondition::contains("t^es$t"),
                    SearchQueryCondition::equals("t^es$t"),
                    SearchQueryCondition::regex("^test$"),
                ])
                .to_regex_string(),
                r"t\^es\$t|^t\^es\$t$|^test$"
            );
            assert_eq!(
                SearchQueryCondition::regex("test").to_regex_string(),
                "test"
            );
            assert_eq!(
                SearchQueryCondition::starts_with("t^es$t").to_regex_string(),
                r"^t\^es\$t"
            );
        }
    }
}
|
@ -0,0 +1,59 @@
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents information about a system
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SystemInfo {
    /// Family of the operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.FAMILY.html
    pub family: String,

    /// Name of the specific operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.OS.html
    pub os: String,

    /// Architecture of the CPU as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.ARCH.html
    pub arch: String,

    /// Current working directory of the running server process
    pub current_dir: PathBuf,

    /// Primary separator for path components for the current platform
    /// as defined in https://doc.rust-lang.org/std/path/constant.MAIN_SEPARATOR.html
    pub main_separator: char,

    /// Name of the user running the server process
    pub username: String,

    /// Default shell tied to user running the server process
    pub shell: String,
}

#[cfg(feature = "schemars")]
impl SystemInfo {
    /// Returns the JSON schema describing [`SystemInfo`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SystemInfo)
    }
}
|
||||
|
||||
impl Default for SystemInfo {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
family: env::consts::FAMILY.to_string(),
|
||||
os: env::consts::OS.to_string(),
|
||||
arch: env::consts::ARCH.to_string(),
|
||||
current_dir: env::current_dir().unwrap_or_default(),
|
||||
main_separator: std::path::MAIN_SEPARATOR,
|
||||
username: whoami::username(),
|
||||
shell: if cfg!(windows) {
|
||||
env::var("ComSpec").unwrap_or_else(|_| String::from("cmd.exe"))
|
||||
} else {
|
||||
env::var("SHELL").unwrap_or_else(|_| String::from("/bin/sh"))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,26 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub(crate) fn deserialize_u128_option<'de, D>(deserializer: D) -> Result<Option<u128>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
match Option::<String>::deserialize(deserializer)? {
|
||||
Some(s) => match s.parse::<u128>() {
|
||||
Ok(value) => Ok(Some(value)),
|
||||
Err(error) => Err(serde::de::Error::custom(format!(
|
||||
"Cannot convert to u128 with error: {error:?}"
|
||||
))),
|
||||
},
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn serialize_u128_option<S: serde::Serializer>(
|
||||
val: &Option<u128>,
|
||||
s: S,
|
||||
) -> Result<S::Ok, S::Error> {
|
||||
match val {
|
||||
Some(v) => format!("{}", *v).serialize(s),
|
||||
None => s.serialize_unit(),
|
||||
}
|
||||
}
|
@ -1,325 +0,0 @@
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use distant_core::{
|
||||
DistantApi, DistantApiServerHandler, DistantChannelExt, DistantClient, DistantCtx,
|
||||
};
|
||||
use distant_net::auth::{DummyAuthHandler, Verifier};
|
||||
use distant_net::client::Client;
|
||||
use distant_net::common::{InmemoryTransport, OneshotListener, Version};
|
||||
use distant_net::server::{Server, ServerRef};
|
||||
use distant_protocol::PROTOCOL_VERSION;
|
||||
|
||||
/// Stands up an inmemory client and server using the given api.
///
/// The pair communicate over an in-memory transport (no real networking);
/// the server is driven by the provided [`DistantApi`] implementation and
/// both sides advertise the same protocol version.
async fn setup(api: impl DistantApi + Send + Sync + 'static) -> (DistantClient, ServerRef) {
    // Channel-backed transport pair with a buffer capacity of 100
    let (t1, t2) = InmemoryTransport::pair(100);

    let server = Server::new()
        .handler(DistantApiServerHandler::new(api))
        // No authentication challenges are issued for these tests
        .verifier(Verifier::none())
        .version(Version::new(
            PROTOCOL_VERSION.major,
            PROTOCOL_VERSION.minor,
            PROTOCOL_VERSION.patch,
        ))
        .start(OneshotListener::from_value(t2))
        .expect("Failed to start server");

    let client: DistantClient = Client::build()
        .auth_handler(DummyAuthHandler)
        .connector(t1)
        .version(Version::new(
            PROTOCOL_VERSION.major,
            PROTOCOL_VERSION.minor,
            PROTOCOL_VERSION.patch,
        ))
        .connect()
        .await
        .expect("Failed to connect to server");

    // Caller must keep the ServerRef alive for the connection to stay open
    (client, server)
}
|
||||
|
||||
// Tests covering a single (non-batch) request/response round trip.
mod single {
    use test_log::test;

    use super::*;

    #[test(tokio::test)]
    async fn should_support_single_request_returning_error() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Always fails so error propagation can be verified end-to-end
            async fn read_file(&self, _ctx: DistantCtx, _path: PathBuf) -> io::Result<Vec<u8>> {
                Err(io::Error::new(io::ErrorKind::NotFound, "test error"))
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        // Both the error kind and message should survive the round trip
        let error = client.read_file(PathBuf::from("file")).await.unwrap_err();
        assert_eq!(error.kind(), io::ErrorKind::NotFound);
        assert_eq!(error.to_string(), "test error");
    }

    #[test(tokio::test)]
    async fn should_support_single_request_returning_success() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Always succeeds with a fixed payload
            async fn read_file(&self, _ctx: DistantCtx, _path: PathBuf) -> io::Result<Vec<u8>> {
                Ok(b"hello world".to_vec())
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        let contents = client.read_file(PathBuf::from("file")).await.unwrap();
        assert_eq!(contents, b"hello world");
    }
}
|
||||
|
||||
// Tests covering batch requests processed concurrently (the default mode).
mod batch_parallel {
    use std::time::{Duration, SystemTime, UNIX_EPOCH};

    use distant_net::common::Request;
    use distant_protocol::{Msg, Request as RequestPayload};
    use test_log::test;

    use super::*;

    #[test(tokio::test)]
    async fn should_support_multiple_requests_running_in_parallel() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Responds with the service time (unix millis as big-endian u64
            // bytes); the "slow" path sleeps first to create a measurable gap
            async fn read_file(&self, _ctx: DistantCtx, path: PathBuf) -> io::Result<Vec<u8>> {
                if path.to_str().unwrap() == "slow" {
                    tokio::time::sleep(Duration::from_millis(500)).await;
                }

                let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
                Ok((time.as_millis() as u64).to_be_bytes().to_vec())
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        // Batch of three reads; the middle one is artificially slow
        let request = Request::new(Msg::batch([
            RequestPayload::FileRead {
                path: PathBuf::from("file1"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("slow"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("file2"),
            },
        ]));

        let response = client.send(request).await.unwrap();
        let payloads = response.payload.into_batch().unwrap();

        // Collect our times from the reading
        let mut times = Vec::new();
        for payload in payloads {
            match payload {
                distant_protocol::Response::Blob { data } => {
                    // First 8 bytes carry the big-endian timestamp
                    let mut buf = [0u8; 8];
                    buf.copy_from_slice(&data[..8]);
                    times.push(u64::from_be_bytes(buf));
                }
                x => panic!("Unexpected payload: {x:?}"),
            }
        }

        // Verify that these ran in parallel as the first and third requests should not be
        // over 500 milliseconds apart due to the sleep in the middle!
        let diff = times[0].abs_diff(times[2]);
        assert!(diff <= 500, "Sequential ordering detected");
    }

    #[test(tokio::test)]
    async fn should_run_all_requests_even_if_some_fail() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Fails only for the "fail" path so mixed outcomes can be checked
            async fn read_file(&self, _ctx: DistantCtx, path: PathBuf) -> io::Result<Vec<u8>> {
                if path.to_str().unwrap() == "fail" {
                    return Err(io::Error::new(io::ErrorKind::Other, "test error"));
                }

                Ok(Vec::new())
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        let request = Request::new(Msg::batch([
            RequestPayload::FileRead {
                path: PathBuf::from("file1"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("fail"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("file2"),
            },
        ]));

        let response = client.send(request).await.unwrap();
        let payloads = response.payload.into_batch().unwrap();

        // Should be a success, error, and success
        assert!(
            matches!(payloads[0], distant_protocol::Response::Blob { .. }),
            "Unexpected payloads[0]: {:?}",
            payloads[0]
        );
        assert!(
            matches!(
                &payloads[1],
                distant_protocol::Response::Error(distant_protocol::Error { kind, description })
                    if matches!(kind, distant_protocol::ErrorKind::Other) && description == "test error"
            ),
            "Unexpected payloads[1]: {:?}",
            payloads[1]
        );
        assert!(
            matches!(payloads[2], distant_protocol::Response::Blob { .. }),
            "Unexpected payloads[2]: {:?}",
            payloads[2]
        );
    }
}
|
||||
|
||||
// Tests covering batch requests processed in order (opt-in via the
// "sequence" header flag on the request).
mod batch_sequence {
    use std::time::{Duration, SystemTime, UNIX_EPOCH};

    use distant_net::common::Request;
    use distant_protocol::{Msg, Request as RequestPayload};
    use test_log::test;

    use super::*;

    #[test(tokio::test)]
    async fn should_support_multiple_requests_running_in_sequence() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Responds with the service time (unix millis as big-endian u64
            // bytes); the "slow" path sleeps first to create a measurable gap
            async fn read_file(&self, _ctx: DistantCtx, path: PathBuf) -> io::Result<Vec<u8>> {
                if path.to_str().unwrap() == "slow" {
                    tokio::time::sleep(Duration::from_millis(500)).await;
                }

                let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
                Ok((time.as_millis() as u64).to_be_bytes().to_vec())
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        let mut request = Request::new(Msg::batch([
            RequestPayload::FileRead {
                path: PathBuf::from("file1"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("slow"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("file2"),
            },
        ]));

        // Mark as running in sequence
        request.header.insert("sequence", true);

        let response = client.send(request).await.unwrap();
        let payloads = response.payload.into_batch().unwrap();

        // Collect our times from the reading
        let mut times = Vec::new();
        for payload in payloads {
            match payload {
                distant_protocol::Response::Blob { data } => {
                    // First 8 bytes carry the big-endian timestamp
                    let mut buf = [0u8; 8];
                    buf.copy_from_slice(&data[..8]);
                    times.push(u64::from_be_bytes(buf));
                }
                x => panic!("Unexpected payload: {x:?}"),
            }
        }

        // Verify that these ran in sequence as the first and third requests should be
        // over 500 milliseconds apart due to the sleep in the middle!
        let diff = times[0].abs_diff(times[2]);
        assert!(diff > 500, "Parallel ordering detected");
    }

    #[test(tokio::test)]
    async fn should_interrupt_any_requests_following_a_failure() {
        struct TestDistantApi;

        #[async_trait]
        impl DistantApi for TestDistantApi {
            // Fails only for the "fail" path so interruption can be checked
            async fn read_file(&self, _ctx: DistantCtx, path: PathBuf) -> io::Result<Vec<u8>> {
                if path.to_str().unwrap() == "fail" {
                    return Err(io::Error::new(io::ErrorKind::Other, "test error"));
                }

                Ok(Vec::new())
            }
        }

        let (mut client, _server) = setup(TestDistantApi).await;

        let mut request = Request::new(Msg::batch([
            RequestPayload::FileRead {
                path: PathBuf::from("file1"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("fail"),
            },
            RequestPayload::FileRead {
                path: PathBuf::from("file2"),
            },
        ]));

        // Mark as running in sequence
        request.header.insert("sequence", true);

        let response = client.send(request).await.unwrap();
        let payloads = response.payload.into_batch().unwrap();

        // Should be a success, error, and interrupt
        assert!(
            matches!(payloads[0], distant_protocol::Response::Blob { .. }),
            "Unexpected payloads[0]: {:?}",
            payloads[0]
        );
        assert!(
            matches!(
                &payloads[1],
                distant_protocol::Response::Error(distant_protocol::Error { kind, description })
                    if matches!(kind, distant_protocol::ErrorKind::Other) && description == "test error"
            ),
            "Unexpected payloads[1]: {:?}",
            payloads[1]
        );
        assert!(
            matches!(
                &payloads[2],
                distant_protocol::Response::Error(distant_protocol::Error { kind, .. })
                    if matches!(kind, distant_protocol::ErrorKind::Interrupted)
            ),
            "Unexpected payloads[2]: {:?}",
            payloads[2]
        );
    }
}
|
@ -1,46 +0,0 @@
|
||||
[package]
|
||||
name = "distant-local"
|
||||
description = "Library implementing distant API for local interactions"
|
||||
categories = ["network-programming"]
|
||||
version = "0.20.0"
|
||||
authors = ["Chip Senkbeil <chip@senkbeil.org>"]
|
||||
edition = "2021"
|
||||
homepage = "https://github.com/chipsenkbeil/distant"
|
||||
repository = "https://github.com/chipsenkbeil/distant"
|
||||
readme = "README.md"
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[features]
|
||||
default = ["macos-fsevent"]
|
||||
|
||||
# If specified, will use MacOS FSEvent for file watching
|
||||
macos-fsevent = ["notify/macos_fsevent"]
|
||||
|
||||
# If specified, will use MacOS kqueue for file watching
|
||||
macos-kqueue = ["notify/macos_kqueue"]
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.68"
|
||||
distant-core = { version = "=0.20.0", path = "../distant-core" }
|
||||
grep = "0.2.12"
|
||||
ignore = "0.4.20"
|
||||
log = "0.4.18"
|
||||
notify = { version = "6.0.0", default-features = false, features = ["macos_fsevent"] }
|
||||
notify-debouncer-full = { version = "0.1.0", default-features = false }
|
||||
num_cpus = "1.15.0"
|
||||
portable-pty = "0.8.1"
|
||||
rand = { version = "0.8.5", features = ["getrandom"] }
|
||||
shell-words = "1.1.0"
|
||||
tokio = { version = "1.28.2", features = ["full"] }
|
||||
walkdir = "2.3.3"
|
||||
whoami = "1.4.0"
|
||||
winsplit = "0.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
assert_fs = "1.0.13"
|
||||
env_logger = "0.10.0"
|
||||
indoc = "2.0.1"
|
||||
once_cell = "1.17.2"
|
||||
predicates = "3.0.3"
|
||||
rstest = "0.17.0"
|
||||
test-log = "0.2.11"
|
@ -1,45 +0,0 @@
|
||||
# distant local
|
||||
|
||||
[![Crates.io][distant_crates_img]][distant_crates_lnk] [![Docs.rs][distant_doc_img]][distant_doc_lnk] [![Rustc 1.70.0][distant_rustc_img]][distant_rustc_lnk]
|
||||
|
||||
[distant_crates_img]: https://img.shields.io/crates/v/distant-local.svg
|
||||
[distant_crates_lnk]: https://crates.io/crates/distant-local
|
||||
[distant_doc_img]: https://docs.rs/distant-local/badge.svg
|
||||
[distant_doc_lnk]: https://docs.rs/distant-local
|
||||
[distant_rustc_img]: https://img.shields.io/badge/distant_local-rustc_1.70+-lightgray.svg
|
||||
[distant_rustc_lnk]: https://blog.rust-lang.org/2023/06/01/Rust-1.70.0.html
|
||||
|
||||
## Details
|
||||
|
||||
The `distant-local` library acts as the primary implementation of a distant
|
||||
server that powers the CLI. The logic acts on the local machine of the server
|
||||
and is designed to be used as the foundation for distant operation handling.
|
||||
|
||||
## Installation
|
||||
|
||||
You can import the dependency by adding the following to your `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
distant-local = "0.20"
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
```rust,no_run
|
||||
use distant_local::{Config, new_handler};
|
||||
|
||||
// Create a server API handler to be used with the server
|
||||
let handler = new_handler(Config::default()).unwrap();
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
Apache License, Version 2.0, (LICENSE-APACHE or
[apache-license][apache-license]) or the MIT license (LICENSE-MIT or
[mit-license][mit-license]), at your option.
|
||||
|
||||
[apache-license]: http://www.apache.org/licenses/LICENSE-2.0
|
||||
[mit-license]: http://opensource.org/licenses/MIT
|
@ -1,28 +0,0 @@
|
||||
use std::time::Duration;
|
||||
|
||||
/// Top-level configuration for the local server.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct Config {
    /// Settings that control file and directory watching.
    pub watch: WatchConfig,
}

/// Configuration specifically for watching files and directories.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct WatchConfig {
    /// Whether to use the platform's native watching mechanism.
    pub native: bool,
    /// Interval between polls when polling is used (semantics per the
    /// underlying watcher implementation).
    pub poll_interval: Option<Duration>,
    /// Whether to compare file contents when determining changes.
    pub compare_contents: bool,
    /// How long to debounce events before reporting them.
    pub debounce_timeout: Duration,
    /// Tick rate driving the debouncer; `None` lets the implementation choose.
    pub debounce_tick_rate: Option<Duration>,
}

impl Default for WatchConfig {
    /// Defaults to native watching with a half-second debounce, no polling
    /// interval, no content comparison, and no explicit tick rate.
    fn default() -> Self {
        // Debounce events for half a second by default
        const DEFAULT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(500);

        WatchConfig {
            native: true,
            poll_interval: None,
            compare_contents: false,
            debounce_timeout: DEFAULT_DEBOUNCE_TIMEOUT,
            debounce_tick_rate: None,
        }
    }
}
|
@ -1,14 +0,0 @@
|
||||
use std::time::Duration;
|
||||
|
||||
/// Capacity associated with the server's file watcher to pass events outbound
pub const SERVER_WATCHER_CAPACITY: usize = 10000;

/// Represents the maximum size (in bytes) that data will be read from pipes
/// per individual `read` call
///
/// Current setting is 16k size
pub const MAX_PIPE_CHUNK_SIZE: usize = 16384;

/// Duration to sleep between reading stdout/stderr chunks (currently 1ms)
/// to avoid sending many small messages to clients
pub const READ_PAUSE_DURATION: Duration = Duration::from_millis(1);
|
@ -1,20 +0,0 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
|
||||
#[doc = include_str!("../README.md")]
|
||||
#[cfg(doctest)]
|
||||
pub struct ReadmeDoctests;
|
||||
|
||||
mod api;
|
||||
mod config;
|
||||
mod constants;
|
||||
pub use api::Api;
|
||||
pub use config::*;
|
||||
use distant_core::DistantApiServerHandler;
|
||||
|
||||
/// Implementation of [`DistantApiServerHandler`] using [`Api`].
|
||||
pub type Handler = DistantApiServerHandler<Api>;
|
||||
|
||||
/// Initializes a new [`Handler`].
|
||||
pub fn new_handler(config: Config) -> std::io::Result<Handler> {
|
||||
Ok(Handler::new(Api::initialize(config)?))
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
mod authenticator;
|
||||
mod handler;
|
||||
mod keychain;
|
||||
mod methods;
|
||||
pub mod msg;
|
||||
|
||||
pub use authenticator::*;
|
||||
pub use handler::*;
|
||||
pub use keychain::*;
|
||||
pub use methods::*;
|
@ -0,0 +1,130 @@
|
||||
use std::io;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use super::{AuthenticationMethod, Authenticator, Challenge, Error, Question};
|
||||
use crate::common::HeapSecretKey;
|
||||
|
||||
/// Authentication method for a static secret key
#[derive(Clone, Debug)]
pub struct StaticKeyAuthenticationMethod {
    // Secret key that challenge answers must match exactly
    key: HeapSecretKey,
}

impl StaticKeyAuthenticationMethod {
    /// Creates a new method backed by the given secret key.
    #[inline]
    pub fn new(key: impl Into<HeapSecretKey>) -> Self {
        Self { key: key.into() }
    }
}
|
||||
|
||||
#[async_trait]
impl AuthenticationMethod for StaticKeyAuthenticationMethod {
    /// Identifier used to negotiate this method with the peer.
    fn id(&self) -> &'static str {
        "static_key"
    }

    /// Issues a single "key" challenge question and succeeds only when the
    /// first answer parses into a key equal to the configured secret.
    async fn authenticate(&self, authenticator: &mut dyn Authenticator) -> io::Result<()> {
        let response = authenticator
            .challenge(Challenge {
                questions: vec![Question {
                    label: "key".to_string(),
                    text: "Provide a key: ".to_string(),
                    options: Default::default(),
                }],
                options: Default::default(),
            })
            .await?;

        // No answer at all is a (non-fatal) permission-denied failure
        if response.answers.is_empty() {
            return Err(Error::non_fatal("missing answer").into_io_permission_denied());
        }

        // Only the first answer is considered; the unwrap is safe because of
        // the emptiness check above. Parse failures and mismatches are
        // reported with the same error.
        match response
            .answers
            .into_iter()
            .next()
            .unwrap()
            .parse::<HeapSecretKey>()
        {
            Ok(key) if key == self.key => Ok(()),
            _ => Err(Error::non_fatal("answer does not match key").into_io_permission_denied()),
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use test_log::test;

    use super::*;
    use crate::common::authentication::msg::{AuthenticationResponse, ChallengeResponse};
    use crate::common::FramedTransport;

    #[test(tokio::test)]
    async fn authenticate_should_fail_if_key_challenge_fails() {
        let method = StaticKeyAuthenticationMethod::new(b"".to_vec());
        let (mut t1, mut t2) = FramedTransport::test_pair(100);

        // Queue up an invalid frame for our challenge to ensure it fails
        t2.write_frame(b"invalid initialization response")
            .await
            .unwrap();

        // A malformed response surfaces as InvalidData rather than
        // PermissionDenied
        assert_eq!(
            method.authenticate(&mut t1).await.unwrap_err().kind(),
            io::ErrorKind::InvalidData
        );
    }

    #[test(tokio::test)]
    async fn authenticate_should_fail_if_no_answer_included_in_challenge_response() {
        let method = StaticKeyAuthenticationMethod::new(b"".to_vec());
        let (mut t1, mut t2) = FramedTransport::test_pair(100);

        // Queue up a response to the initialization request
        t2.write_frame_for(&AuthenticationResponse::Challenge(ChallengeResponse {
            answers: Vec::new(),
        }))
        .await
        .unwrap();

        assert_eq!(
            method.authenticate(&mut t1).await.unwrap_err().kind(),
            io::ErrorKind::PermissionDenied
        );
    }

    #[test(tokio::test)]
    async fn authenticate_should_fail_if_answer_does_not_match_key() {
        let method = StaticKeyAuthenticationMethod::new(b"answer".to_vec());
        let (mut t1, mut t2) = FramedTransport::test_pair(100);

        // Queue up a response to the initialization request
        t2.write_frame_for(&AuthenticationResponse::Challenge(ChallengeResponse {
            answers: vec![HeapSecretKey::from(b"some key".to_vec()).to_string()],
        }))
        .await
        .unwrap();

        assert_eq!(
            method.authenticate(&mut t1).await.unwrap_err().kind(),
            io::ErrorKind::PermissionDenied
        );
    }

    #[test(tokio::test)]
    async fn authenticate_should_succeed_if_answer_matches_key() {
        let method = StaticKeyAuthenticationMethod::new(b"answer".to_vec());
        let (mut t1, mut t2) = FramedTransport::test_pair(100);

        // Queue up a response to the initialization request
        t2.write_frame_for(&AuthenticationResponse::Challenge(ChallengeResponse {
            answers: vec![HeapSecretKey::from(b"answer".to_vec()).to_string()],
        }))
        .await
        .unwrap();

        method.authenticate(&mut t1).await.unwrap();
    }
}
|
File diff suppressed because it is too large
Load Diff
@ -1,109 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::{fmt, io};
|
||||
|
||||
use derive_more::IntoIterator;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::common::{utils, Value};
|
||||
|
||||
/// Generates a new [`Header`] of key/value pairs based on literals.
///
/// ```
/// use distant_net::header;
///
/// let _header = header!("key" -> "value", "key2" -> 123);
/// ```
#[macro_export]
macro_rules! header {
    // Zero or more `"key" -> value` pairs with an optional trailing comma;
    // the underscore-prefixed local avoids unused-variable warnings when the
    // macro is invoked with no pairs
    ($($key:literal -> $value:expr),* $(,)?) => {{
        let mut _header = $crate::common::Header::default();

        $(
            _header.insert($key, $value);
        )*

        _header
    }};
}
|
||||
|
||||
/// Represents a packet header comprised of arbitrary data tied to string keys.
#[derive(Clone, Debug, Default, PartialEq, Eq, IntoIterator, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Header(HashMap<String, Value>);

impl Header {
    /// Creates an empty [`Header`] newtype wrapper.
    pub fn new() -> Self {
        Self::default()
    }

    /// Exists purely to support serde serialization checks.
    #[inline]
    pub(crate) fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, [`None`] is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old value is returned.
    /// The key is not updated, though; this matters for types that can be `==` without being
    /// identical. See the [module-level documentation](std::collections#insert-and-complex-keys)
    /// for more.
    pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) -> Option<Value> {
        self.0.insert(key.into(), value.into())
    }

    /// Retrieves a value from the header, attempting to convert it to the specified type `T`
    /// by cloning the value and then converting it.
    ///
    /// Returns `None` when the key is absent and `Some(Err(_))` when the value
    /// exists but cannot be converted to `T`.
    pub fn get_as<T>(&self, key: impl AsRef<str>) -> Option<io::Result<T>>
    where
        T: DeserializeOwned,
    {
        self.0
            .get(key.as_ref())
            .map(|value| value.clone().cast_as())
    }

    /// Serializes the header into bytes.
    pub fn to_vec(&self) -> io::Result<Vec<u8>> {
        utils::serialize_to_vec(self)
    }

    /// Deserializes the header from bytes.
    pub fn from_slice(slice: &[u8]) -> io::Result<Self> {
        utils::deserialize_from_slice(slice)
    }
}
|
||||
|
||||
// Forward read access to the underlying map so callers can use HashMap
// methods directly on a Header
impl Deref for Header {
    type Target = HashMap<String, Value>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Forward mutable access to the underlying map
impl DerefMut for Header {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
||||
|
||||
impl fmt::Display for Header {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{{")?;
|
||||
|
||||
for (key, value) in self.0.iter() {
|
||||
let value = serde_json::to_string(value).unwrap_or_else(|_| String::from("--"));
|
||||
write!(f, "\"{key}\" = {value}")?;
|
||||
}
|
||||
|
||||
write!(f, "}}")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
@ -1,112 +0,0 @@
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::common::utils;
|
||||
|
||||
/// Generic value type for data passed through header.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Value(serde_json::Value);

impl Value {
    /// Creates a new [`Value`] by converting `value` to the underlying type.
    pub fn new(value: impl Into<serde_json::Value>) -> Self {
        Self(value.into())
    }

    /// Serializes the value into bytes.
    pub fn to_vec(&self) -> io::Result<Vec<u8>> {
        utils::serialize_to_vec(self)
    }

    /// Deserializes the value from bytes.
    pub fn from_slice(slice: &[u8]) -> io::Result<Self> {
        utils::deserialize_from_slice(slice)
    }

    /// Attempts to convert this generic value to a specific type.
    ///
    /// Conversion failures surface as [`io::ErrorKind::InvalidData`] errors.
    pub fn cast_as<T>(self) -> io::Result<T>
    where
        T: DeserializeOwned,
    {
        serde_json::from_value(self.0).map_err(|x| io::Error::new(io::ErrorKind::InvalidData, x))
    }
}
|
||||
|
||||
// Forward read access to the wrapped serde_json value
impl Deref for Value {
    type Target = serde_json::Value;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Forward mutable access to the wrapped serde_json value
impl DerefMut for Value {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
||||
|
||||
// Generates `From<$type> for Value` impls by delegating to serde_json's own
// `From` conversions
macro_rules! impl_from {
    ($($type:ty),+) => {
        $(
            impl From<$type> for Value {
                fn from(x: $type) -> Self {
                    Self(From::from(x))
                }
            }
        )+
    };
}

impl_from!(
    (),
    i8, i16, i32, i64, isize,
    u8, u16, u32, u64, usize,
    f32, f64,
    bool, String, serde_json::Number,
    serde_json::Map<String, serde_json::Value>
);

// Slices of convertible items become JSON arrays
impl<'a, T> From<&'a [T]> for Value
where
    T: Clone + Into<serde_json::Value>,
{
    fn from(x: &'a [T]) -> Self {
        Self(From::from(x))
    }
}

impl<'a> From<&'a str> for Value {
    fn from(x: &'a str) -> Self {
        Self(From::from(x))
    }
}

impl<'a> From<Cow<'a, str>> for Value {
    fn from(x: Cow<'a, str>) -> Self {
        Self(From::from(x))
    }
}

// `None` maps to JSON null via serde_json's Option conversion
impl<T> From<Option<T>> for Value
where
    T: Into<serde_json::Value>,
{
    fn from(x: Option<T>) -> Self {
        Self(From::from(x))
    }
}

impl<T> From<Vec<T>> for Value
where
    T: Into<serde_json::Value>,
{
    fn from(x: Vec<T>) -> Self {
        Self(From::from(x))
    }
}
|
@ -1,132 +0,0 @@
|
||||
use semver::{Comparator, Op, Prerelease, Version as SemVer};
|
||||
use std::fmt;
|
||||
|
||||
/// Represents a version and compatibility rules.
#[derive(Clone, Debug)]
pub struct Version {
    // Concrete semver version this instance represents
    inner: SemVer,
    // Inclusive lower bound used by compatibility checks
    lower: Comparator,
    // Exclusive upper bound used by compatibility checks
    upper: Comparator,
}
|
||||
|
||||
impl Version {
    /// Creates a new version in the form `major.minor.patch` with a ruleset that is used to check
    /// other versions such that `>=0.1.2, <0.2.0` or `>=1.2.3, <2` depending on whether or not the
    /// major version is `0`.
    ///
    /// ```
    /// use distant_net::common::Version;
    ///
    /// // Matching versions are compatible
    /// let a = Version::new(1, 2, 3);
    /// let b = Version::new(1, 2, 3);
    /// assert!(a.is_compatible_with(&b));
    ///
    /// // Version 1.2.3 is compatible with 1.2.4, but not the other way
    /// let a = Version::new(1, 2, 3);
    /// let b = Version::new(1, 2, 4);
    /// assert!(a.is_compatible_with(&b));
    /// assert!(!b.is_compatible_with(&a));
    ///
    /// // Version 1.2.3 is compatible with 1.3.0, but not 2
    /// let a = Version::new(1, 2, 3);
    /// assert!(a.is_compatible_with(&Version::new(1, 3, 0)));
    /// assert!(!a.is_compatible_with(&Version::new(2, 0, 0)));
    ///
    /// // Version 0.1.2 is compatible with 0.1.3, but not the other way
    /// let a = Version::new(0, 1, 2);
    /// let b = Version::new(0, 1, 3);
    /// assert!(a.is_compatible_with(&b));
    /// assert!(!b.is_compatible_with(&a));
    ///
    /// // Version 0.1.2 is not compatible with 0.2
    /// let a = Version::new(0, 1, 2);
    /// let b = Version::new(0, 2, 0);
    /// assert!(!a.is_compatible_with(&b));
    /// assert!(!b.is_compatible_with(&a));
    /// ```
    pub const fn new(major: u64, minor: u64, patch: u64) -> Self {
        Self {
            inner: SemVer::new(major, minor, patch),
            // Lower bound: this exact version or newer
            lower: Comparator {
                op: Op::GreaterEq,
                major,
                minor: Some(minor),
                patch: Some(patch),
                pre: Prerelease::EMPTY,
            },
            // Upper bound: the next breaking release. With major version 0
            // the minor version is the breaking boundary, so `<0.(minor+1)`;
            // otherwise `<(major+1)`
            upper: Comparator {
                op: Op::Less,
                major: if major == 0 { 0 } else { major + 1 },
                minor: if major == 0 { Some(minor + 1) } else { None },
                patch: None,
                pre: Prerelease::EMPTY,
            },
        }
    }

    /// Returns true if this version is compatible with another version.
    ///
    /// Both bounds come from `self`'s ruleset; `other` must fall inside them.
    pub fn is_compatible_with(&self, other: &Self) -> bool {
        self.lower.matches(&other.inner) && self.upper.matches(&other.inner)
    }

    /// Converts from a collection of bytes into a version using the byte form major/minor/patch
    /// using big endian.
    pub const fn from_be_bytes(bytes: [u8; 24]) -> Self {
        // Bytes are spelled out individually to keep this usable in a const
        // context (no slice-to-array conversions here)
        Self::new(
            u64::from_be_bytes([
                bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
            ]),
            u64::from_be_bytes([
                bytes[8], bytes[9], bytes[10], bytes[11], bytes[12], bytes[13], bytes[14],
                bytes[15],
            ]),
            u64::from_be_bytes([
                bytes[16], bytes[17], bytes[18], bytes[19], bytes[20], bytes[21], bytes[22],
                bytes[23],
            ]),
        )
    }

    /// Converts the version into a byte form of major/minor/patch using big endian.
    pub const fn to_be_bytes(&self) -> [u8; 24] {
        let major = self.inner.major.to_be_bytes();
        let minor = self.inner.minor.to_be_bytes();
        let patch = self.inner.patch.to_be_bytes();

        [
            major[0], major[1], major[2], major[3], major[4], major[5], major[6], major[7],
            minor[0], minor[1], minor[2], minor[3], minor[4], minor[5], minor[6], minor[7],
            patch[0], patch[1], patch[2], patch[3], patch[4], patch[5], patch[6], patch[7],
        ]
    }
}
|
||||
|
||||
impl Default for Version {
    /// Default version is `0.0.0`.
    fn default() -> Self {
        Self::new(0, 0, 0)
    }
}

impl fmt::Display for Version {
    // Displays only the inner semver (e.g. `1.2.3`), not the comparators
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.inner)
    }
}

impl From<semver::Version> for Version {
    /// Creates a new [`Version`] using the major, minor, and patch information from
    /// [`semver::Version`].
    fn from(version: semver::Version) -> Self {
        let mut this = Self::new(version.major, version.minor, version.patch);
        // Keep the full original version (including any pre-release/build
        // metadata) as the inner value; the comparators derive from the
        // numeric components only
        this.inner = version;
        this
    }
}

impl From<Version> for semver::Version {
    fn from(version: Version) -> Self {
        version.inner
    }
}
|
@ -1,17 +1,8 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
|
||||
#[doc = include_str!("../README.md")]
|
||||
#[cfg(doctest)]
|
||||
pub struct ReadmeDoctests;
|
||||
|
||||
mod authentication;
|
||||
pub mod client;
|
||||
pub mod common;
|
||||
pub mod manager;
|
||||
pub mod server;
|
||||
|
||||
pub use client::{Client, ReconnectStrategy};
|
||||
/// Authentication functionality tied to network operations.
|
||||
pub use distant_auth as auth;
|
||||
pub use server::Server;
|
||||
pub use {log, paste};
|
||||
|
@ -0,0 +1,212 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashSet;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::{BitAnd, BitOr, BitXor};
|
||||
use std::str::FromStr;
|
||||
|
||||
use derive_more::{From, Into, IntoIterator};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumMessage, IntoEnumIterator};
|
||||
|
||||
use super::ManagerCapabilityKind;
|
||||
|
||||
/// Set of supported capabilities for a manager
///
/// Wraps a `HashSet<ManagerCapability>` and serializes transparently as that
/// set. Because `ManagerCapability`'s `Eq`/`Hash` only consider the
/// (case-insensitive) `kind`, the set holds at most one entry per kind.
#[derive(Clone, Debug, From, Into, PartialEq, Eq, IntoIterator, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(transparent)]
pub struct ManagerCapabilities(#[into_iterator(owned, ref)] HashSet<ManagerCapability>);
|
||||
|
||||
impl ManagerCapabilities {
|
||||
/// Return set of capabilities encompassing all possible capabilities
|
||||
pub fn all() -> Self {
|
||||
Self(
|
||||
ManagerCapabilityKind::iter()
|
||||
.map(ManagerCapability::from)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Return empty set of capabilities
|
||||
pub fn none() -> Self {
|
||||
Self(HashSet::new())
|
||||
}
|
||||
|
||||
/// Returns true if the capability with described kind is included
|
||||
pub fn contains(&self, kind: impl AsRef<str>) -> bool {
|
||||
let cap = ManagerCapability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.contains(&cap)
|
||||
}
|
||||
|
||||
/// Adds the specified capability to the set of capabilities
|
||||
///
|
||||
/// * If the set did not have this capability, returns `true`
|
||||
/// * If the set did have this capability, returns `false`
|
||||
pub fn insert(&mut self, cap: impl Into<ManagerCapability>) -> bool {
|
||||
self.0.insert(cap.into())
|
||||
}
|
||||
|
||||
/// Removes the capability with the described kind, returning the capability
|
||||
pub fn take(&mut self, kind: impl AsRef<str>) -> Option<ManagerCapability> {
|
||||
let cap = ManagerCapability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.take(&cap)
|
||||
}
|
||||
|
||||
/// Removes the capability with the described kind, returning true if it existed
|
||||
pub fn remove(&mut self, kind: impl AsRef<str>) -> bool {
|
||||
let cap = ManagerCapability {
|
||||
kind: kind.as_ref().to_string(),
|
||||
description: String::new(),
|
||||
};
|
||||
self.0.remove(&cap)
|
||||
}
|
||||
|
||||
/// Converts into vec of capabilities sorted by kind
|
||||
pub fn into_sorted_vec(self) -> Vec<ManagerCapability> {
|
||||
let mut this = self.0.into_iter().collect::<Vec<_>>();
|
||||
|
||||
this.sort_unstable();
|
||||
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl ManagerCapabilities {
    /// Returns the JSON schema describing this type (only available with the
    /// `schemars` feature enabled).
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(ManagerCapabilities)
    }
}
||||
|
||||
impl BitAnd for &ManagerCapabilities {
|
||||
type Output = ManagerCapabilities;
|
||||
|
||||
fn bitand(self, rhs: Self) -> Self::Output {
|
||||
ManagerCapabilities(self.0.bitand(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr for &ManagerCapabilities {
|
||||
type Output = ManagerCapabilities;
|
||||
|
||||
fn bitor(self, rhs: Self) -> Self::Output {
|
||||
ManagerCapabilities(self.0.bitor(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<ManagerCapability> for &ManagerCapabilities {
|
||||
type Output = ManagerCapabilities;
|
||||
|
||||
fn bitor(self, rhs: ManagerCapability) -> Self::Output {
|
||||
let mut other = ManagerCapabilities::none();
|
||||
other.0.insert(rhs);
|
||||
|
||||
self.bitor(&other)
|
||||
}
|
||||
}
|
||||
|
||||
impl BitXor for &ManagerCapabilities {
|
||||
type Output = ManagerCapabilities;
|
||||
|
||||
fn bitxor(self, rhs: Self) -> Self::Output {
|
||||
ManagerCapabilities(self.0.bitxor(&rhs.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<ManagerCapability> for ManagerCapabilities {
|
||||
fn from_iter<I: IntoIterator<Item = ManagerCapability>>(iter: I) -> Self {
|
||||
let mut this = ManagerCapabilities::none();
|
||||
|
||||
for capability in iter {
|
||||
this.0.insert(capability);
|
||||
}
|
||||
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
/// ManagerCapability tied to a manager. A capability is equivalent based on its kind and not
/// description.
///
/// Kind comparisons are ASCII case-insensitive (see the `PartialEq`, `Ord`,
/// and `Hash` impls), so `"Foo"` and `"foo"` name the same capability.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct ManagerCapability {
    /// Label describing the kind of capability
    pub kind: String,

    /// Information about the capability
    pub description: String,
}
|
||||
|
||||
impl ManagerCapability {
|
||||
/// Will convert the [`ManagerCapability`]'s `kind` into a known [`ManagerCapabilityKind`] if
|
||||
/// possible, returning None if the capability is unknown
|
||||
pub fn to_capability_kind(&self) -> Option<ManagerCapabilityKind> {
|
||||
ManagerCapabilityKind::from_str(&self.kind).ok()
|
||||
}
|
||||
|
||||
/// Returns true if the described capability is unknown
|
||||
pub fn is_unknown(&self) -> bool {
|
||||
self.to_capability_kind().is_none()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for ManagerCapability {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.kind.eq_ignore_ascii_case(&other.kind)
|
||||
}
|
||||
}
|
||||
|
||||
// Promoting to `Eq` is sound: case-insensitive kind comparison is reflexive.
impl Eq for ManagerCapability {}
|
||||
|
||||
impl PartialOrd for ManagerCapability {
    /// Delegates to the total order defined by `Ord` (case-insensitive kind).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
|
||||
|
||||
impl Ord for ManagerCapability {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.kind
|
||||
.to_ascii_lowercase()
|
||||
.cmp(&other.kind.to_ascii_lowercase())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for ManagerCapability {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.kind.to_ascii_lowercase().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ManagerCapabilityKind> for ManagerCapability {
    /// Creates a new capability using the kind's default message
    fn from(kind: ManagerCapabilityKind) -> Self {
        // `get_message` comes from strum's `EnumMessage`; fall back to an
        // empty description when the variant declares no message.
        let description = match kind.get_message() {
            Some(msg) => msg.to_string(),
            None => String::new(),
        };

        Self {
            kind: kind.to_string(),
            description,
        }
    }
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl ManagerCapability {
    /// Returns the JSON schema describing this type (only available with the
    /// `schemars` feature enabled).
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(ManagerCapability)
    }
}
|
||||
|
||||
#[cfg(feature = "schemars")]
impl ManagerCapabilityKind {
    /// Returns the JSON schema describing this enum (only available with the
    /// `schemars` feature enabled).
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(ManagerCapabilityKind)
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue