chore: add src files & cargo
This commit is contained in:
parent
7873f5defa
commit
0347eb7043
16 changed files with 6873 additions and 0 deletions
327
src/app.rs
Normal file
327
src/app.rs
Normal file
|
|
@ -0,0 +1,327 @@
|
|||
/*! Application-specific logic lives here
|
||||
|
||||
**TODO:** Look into moving the argument definition into a
|
||||
[build.rs](https://doc.rust-lang.org/cargo/reference/build-scripts.html) like in the
|
||||
[clap_generate](https://docs.rs/clap_generate/3.0.0-beta.1/clap_generate/fn.generate_to.html)
|
||||
examples so I don't have build the completion generation code into the output binary.
|
||||
*/
|
||||
|
||||
// Parts Copyright 2017-2021, Stephan Sokolow
|
||||
|
||||
// Standard library imports
|
||||
// use std::path::PathBuf;
|
||||
// use std::process;
|
||||
use std::fs;
|
||||
use std::collections::HashMap;
|
||||
|
||||
// 3rd-party crate imports
|
||||
use anyhow::Result;
|
||||
use structopt::StructOpt;
|
||||
|
||||
// Local Imports
|
||||
use crate::helpers::{BoilerplateOpts, HELP_TEMPLATE};
|
||||
// use crate::validators::path_readable_file;
|
||||
use crate::clouds::{on_cloud};
|
||||
use crate::defs::{BxDynResult};
|
||||
use clds::clouds::on_clouds::clear_specs;
|
||||
|
||||
use tkdr::crypt_lib::{encrypt, decrypt};
|
||||
use tkdr::tera_lib::{hash_from_data, data_templated, hash_content};
|
||||
use tkdr::randomkey::{RandomKey};
|
||||
use key_of_life::get_key;
|
||||
|
||||
/// The verbosity level when no `-q` or `-v` arguments are given, with `0` being `-q`
|
||||
pub const DEFAULT_VERBOSITY: u64 = 1;
|
||||
|
||||
|
||||
|
||||
/// Default KEY PATH
|
||||
pub const KEY_PATH: &str = ".k";
|
||||
/// Command-line argument schema
|
||||
///
|
||||
/// ## Relevant Conventions:
|
||||
///
|
||||
/// * Make sure that there is a blank space between the `<name>` `<version>` line and the
|
||||
/// description text or the `--help` output won't comply with the platform conventions that
|
||||
/// `help2man` depends on to generate your manpage. (Specifically, it will mistake the `<name>
|
||||
/// <version>` line for part of the description.)
|
||||
/// * `StructOpt`'s default behaviour of including the author name in the `--help` output is an
|
||||
/// oddity among Linux commands and, if you don't disable it, you run the risk of people
|
||||
/// unfamiliar with `StructOpt` assuming that you are an egotistical person who made a conscious
|
||||
/// choice to add it.
|
||||
///
|
||||
/// The proper standardized location for author information is the `AUTHOR` section which you
|
||||
/// can read about by typing `man help2man`.
|
||||
///
|
||||
/// ## Cautions:
|
||||
/// * Subcommands do not inherit `template` and it must be re-specified for each one.
|
||||
/// ([clap-rs/clap#1184](https://github.com/clap-rs/clap/issues/1184))
|
||||
/// * Double-check that your choice of `about` or `long_about` is actually overriding this
|
||||
/// doc comment. The precedence has some bugs such as
|
||||
/// [TeXitoi/structopt#391](https://github.com/TeXitoi/structopt/issues/391) and
|
||||
/// [TeXitoi/structopt#333](https://github.com/TeXitoi/structopt/issues/333).
|
||||
/// * Do not begin the description text for subcommands with `\n`. It will break the formatting in
|
||||
/// the top-level help output's list of subcommands.
|
||||
// NOTE: the `///` doc comments on fields below are emitted verbatim by StructOpt
// as the user-facing `--help` text, so they are left untouched; review notes use
// plain `//` comments instead.
#[derive(StructOpt, Debug)]
#[structopt(template = HELP_TEMPLATE,
    about = "Coder is a command utility to manage keys (generation,encrypt,hash) and make content with Tera templates",
    global_setting = structopt::clap::AppSettings::ColoredHelp)]
pub struct CliOpts {
    // Shared -q/-v/--timestamp/--dump-completions flags (see helpers.rs).
    #[allow(clippy::missing_docs_in_private_items)] // StructOpt compile-time errors if we doc this
    #[structopt(flatten)]
    pub boilerplate: BoilerplateOpts,

    /// TskSrvc to run
    #[structopt(short, long)]
    pub tsksrvc: Option<String>,

    /// Command to run on TskSrvc
    #[structopt(short, long)]
    pub cmd: Option<String>,

    /// On next tsksrvc after TskSrvc selected ('next')
    #[structopt(short, long)]
    pub next: Option<String>,

    // Presence of --source is what triggers the on_cloud path in main().
    /// Source cloud path
    #[structopt(short, long)]
    pub source: Option<String>,

    /// list of hosts
    #[structopt(short, long)]
    pub listhosts: Option<String>,

    // Occurrence-counted: -f => 1, -ff => 2, etc.
    /// Force running (-f, -ff etc.)
    #[structopt(short, long, parse(from_occurrences))]
    pub force: u8,
    // Disabled crypto/templating flags kept for reference; the corresponding
    // handling code in main() is commented out as well.
    /*
    /// Encrypt text
    #[structopt(short, long)]
    pub encrypt: Option<String>,

    /// Encrypt file
    #[structopt(long)]
    fencrypt: Option<String>,

    /// Decrypt text
    #[structopt(short, long)]
    decrypt: Option<String>,

    /// Decrypt file
    #[structopt(long)]
    fdecrypt: Option<String>,

    /// Random key (optional uuid format)
    #[structopt(short, long)]
    key: Option<String>,

    /// Get Blake3 hash from text
    #[structopt(long)]
    hash: Option<String>,

    /// Tera template with data
    #[structopt(short,long)]
    tera: Option<Vec<String>>,

    /// Get uuid v4
    #[structopt(short,long)]
    uuid: Option<String>,

    /// Input for tsksrvc
    #[structopt(short, long)]
    input: Option<String>,

    /// Output for tsksrvc
    #[structopt(short, long)]
    output: Option<String>,

    /// Output for http
    #[structopt(short, long)]
    http: Option<String>,
    */
    // File(s) to use as input
    //
    // **TODO:** Figure out if there's a way to only enforce constraints on this when not asking
    // to dump completions.
    /*
    #[structopt(parse(from_os_str),
                validator_os = path_readable_file)
    ]
    inpath: Vec<PathBuf>,
    */
    // Free positional arguments; currently unused by the active code paths.
    /// Target
    #[structopt()]
    args: Vec<String>,
}
|
||||
|
||||
/// The actual `main()`
|
||||
///
|
||||
/// Using a key and `XChaCha20Poly130` and random nonce
|
||||
/// -run encrypt text-to-encrypt
|
||||
/// -run decrypt text-to-decrypt
|
||||
///
|
||||
/// Format -key n for a key with n characters long
|
||||
/// Format -key n uuid for n items in uuid format with dashes
|
||||
#[allow(clippy::integer_arithmetic, clippy::restriction)]
|
||||
pub async fn main(opts: CliOpts) -> BxDynResult<String> { // anyhow::Result<()> {
|
||||
// println!("|{}|",&key);
|
||||
let key_path = &envmnt::get_or("KEY_PATH", KEY_PATH);
|
||||
let key = get_key(&key_path,None).await;
|
||||
// let key = get_key(KEY_PATH,None).await;
|
||||
if key.is_empty() {
|
||||
std::process::exit(0x0100);
|
||||
}
|
||||
let empty_res=String::from("");
|
||||
let mut run_tsksrvc = String::from("all");
|
||||
let mut run_cmd = String::from("");
|
||||
let mut run_next = String::from("");
|
||||
let mut listhosts = String::from("");
|
||||
if let Some(tsksrvc)= opts.tsksrvc {
|
||||
run_tsksrvc=String::from(&tsksrvc);
|
||||
}
|
||||
if let Some(cmd)=opts.cmd {
|
||||
run_cmd=String::from(&cmd);
|
||||
}
|
||||
if let Some(list_hosts)=opts.listhosts {
|
||||
listhosts=String::from(&list_hosts);
|
||||
}
|
||||
if let Some(nxt)=opts.next {
|
||||
run_next=String::from(&nxt);
|
||||
}
|
||||
if let Some(source)=opts.source {
|
||||
//let res = encrypt(&text, &key);
|
||||
match on_cloud(&run_tsksrvc, &run_cmd, &run_next, &source, &listhosts, opts.force).await {
|
||||
Ok(res) => return Ok(res),
|
||||
Err(e) => println!("Error on_cloud: {}",e),
|
||||
}
|
||||
if envmnt::get_or("KLDS_DEBUG", "").len() == 0 {
|
||||
println!("Clean on_cloud");
|
||||
match clear_specs(&source).await {
|
||||
Ok(_) => return Ok(empty_res),
|
||||
Err(e) => println!("Error on_cloud: {}",e),
|
||||
}
|
||||
}
|
||||
let now = chrono::Utc::now().timestamp();
|
||||
// println!("{}",&now);
|
||||
return Ok(empty_res);
|
||||
// TODO Chect to remove provision specs config
|
||||
// println!("{} - {}",&tsksrvc,&source);
|
||||
}
|
||||
/*
|
||||
if opts.force > 0 {
|
||||
println!("Force: {}",opts.force);
|
||||
return Ok(());
|
||||
}
|
||||
// dbg!(&opts);
|
||||
if let Some(text)= opts.encrypt {
|
||||
let res = encrypt(&text, &key);
|
||||
println!("{}",&res);
|
||||
return Ok(());
|
||||
}
|
||||
if let Some(file)= opts.fencrypt {
|
||||
match fs::read_to_string(&file.as_str()) {
|
||||
Ok(content) => {
|
||||
let res = encrypt(&content, &key);
|
||||
println!("{}",&res);
|
||||
},
|
||||
Err(e) => {
|
||||
println!("Unable to open {}\n {}",&file,e);
|
||||
},
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
if let Some(text)= opts.decrypt {
|
||||
let res = decrypt(&text, &key);
|
||||
println!("{}",&res);
|
||||
return Ok(());
|
||||
}
|
||||
if let Some(file)= opts.fdecrypt {
|
||||
match fs::read_to_string(&file.as_str()) {
|
||||
Ok(content) => {
|
||||
let res = decrypt(&content, &key);
|
||||
println!("{}",&res);
|
||||
},
|
||||
Err(e) => {
|
||||
println!("Unable to open {}\n {}",&file,e);
|
||||
},
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
if core::option::Option::is_some(&opts.key) {
|
||||
let mut template = "";
|
||||
if let (Some(_),Some(templ)) = (opts.args.get(0),opts.args.get(1)) {
|
||||
template = templ.as_str();
|
||||
}
|
||||
if let Some(text_long) = opts.args.get(0) {
|
||||
match text_long.parse::<u32>() {
|
||||
Ok(lon) => {
|
||||
let random = RandomKey::config(lon,false);
|
||||
if template == "" {
|
||||
println!("{}", random.get_key());
|
||||
} else {
|
||||
for ky in random.from_template(template, lon) {
|
||||
println!("{}",ky);
|
||||
}
|
||||
};
|
||||
},
|
||||
Err(e) => println!("Error number conversion {}",e),
|
||||
};
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
// if core::option::Option::is_some(&opts.tera) {
|
||||
if let Some(tera) = opts.tera {
|
||||
if let (Some(data_file),Some(tpl_file))= (tera.get(0),tera.get(1)) {
|
||||
let mut data_hash: HashMap<String, String> = HashMap::new();
|
||||
let mut tpl_context = tera::Context::new();
|
||||
// println!("data: {} template: {}",data_file,tpl_file);
|
||||
match hash_from_data(data_file, &mut tpl_context, &mut data_hash, true) {
|
||||
Ok(_) => {
|
||||
match data_templated(&"".to_string(),tpl_file,&mut tpl_context, &data_hash) {
|
||||
Ok(result) => println!("{}",result),
|
||||
Err(e) => println!("Error with {} data in {} template: {}", data_file, tpl_file, e),
|
||||
}
|
||||
},
|
||||
Err(e) => println!("Error with {} data in {} template: {}", data_file, tpl_file, e),
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
if let Some(text) = opts.hash {
|
||||
let res = hash_content(&text);
|
||||
println!("{}",res);
|
||||
return Ok(());
|
||||
}
|
||||
if let Some(_) = opts.uuid {
|
||||
println!("{}", uuid::Uuid::new_v4());
|
||||
return Ok(());
|
||||
}
|
||||
*/
|
||||
// if let Some(_) = opts.http {
|
||||
// let _ =http_main().await;
|
||||
// return Ok(());
|
||||
// }
|
||||
/*
|
||||
for inpath in opts.inpath {
|
||||
// todo!("Implement application logic")
|
||||
println!("Target: {}",inpath.display());
|
||||
}
|
||||
|
||||
*/
|
||||
Ok(empty_res)
|
||||
}
|
||||
|
||||
// Tests go below the code where they'll be out of the way when not the target of attention
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::CliOpts;
|
||||
|
||||
// TODO: Unit test to verify that the doc comments on `CliOpts` or `BoilerplateOpts` aren't
|
||||
// overriding the intended about string.
|
||||
|
||||
#[test]
|
||||
/// Test something
|
||||
fn test_something() {
|
||||
// TODO: Test something
|
||||
}
|
||||
}
|
||||
177
src/clouds.rs
Normal file
177
src/clouds.rs
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
use std::fs; //, io};
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use rfm::mkdir;
|
||||
use std::collections::HashMap;
|
||||
use serde_yaml::Value;
|
||||
use serde_dhall;
|
||||
// use std::process::{Command};
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use anyhow::{anyhow,Result,Context,Error};
|
||||
use tkdr::tera_lib::make_template_content;
|
||||
|
||||
use clds::pkgs::{get_pkgs_vers,PkgInfo,PkgVers};
|
||||
use clds::clouds::defs::{
|
||||
CfgRs,
|
||||
CloudEnv,
|
||||
Cloud,
|
||||
ConfigResources,
|
||||
Server,
|
||||
TskSrvc,
|
||||
MainResourcesConfig,
|
||||
};
|
||||
use clds::clouds::on_clouds::env_cloud;
|
||||
|
||||
use clds::tsksrvcs::run_tsksrvcs_on_providers;
|
||||
use crate::defs::{load_key};
|
||||
use crate::errors::AppError;
|
||||
use crate::utils;
|
||||
|
||||
// use tempfile::tempfile;
|
||||
// use std::fs::File;
|
||||
//use base64_stream::FromBase64Writer;
|
||||
|
||||
/// On_cloud
|
||||
/// Load env and config files
|
||||
/// Load `tsksrvc` (can be group of tsksrvcs)
|
||||
/// On each `element` in `config` run `tsksrvc`
|
||||
pub async fn on_cloud(tsksrvc: &str, cmd: &str, nxt: &str, source: &str, listhosts: &str, force: u8) -> Result<String> {
|
||||
let mut cloud = Cloud::default();
|
||||
cloud.env = CloudEnv::new(force, load_key().await);
|
||||
cloud.providers = Cloud::load_providers().await;
|
||||
env_cloud(source, &mut cloud.env).await?;
|
||||
get_pkgs_vers(&mut cloud).await?;
|
||||
cloud.env.listhosts = String::from(listhosts);
|
||||
|
||||
let cfg_path = format!("{}/{}/{}/{}",&cloud.env.home,&source,&cloud.env.config_root,&cloud.env.config_path);
|
||||
let mut cfg_data = fs::read_to_string(&cfg_path).with_context(|| format!("Failed to read 'cfg_path' from {}", &cfg_path))?;
|
||||
|
||||
let mut cfg: MainResourcesConfig = serde_yaml::from_str(&cfg_data)?;
|
||||
let provider = cloud.providers.get(&cfg.provider).with_context(|| format!("Provider '{}'' not defined", &cfg.provider))?;
|
||||
|
||||
// Load tsksrvcs.yaml into TskSrvcs
|
||||
|
||||
// for (i, tsk) in tsksrvcs.tsksrvcsList.iter().enumerate() {
|
||||
// if tsksrvc == "all" || tsk.name.as_str() == tsksrvc {
|
||||
// match tsk.target.as_str() {
|
||||
// "servers" => on_cloud_server(&mut cloud, i, tsk).await?,
|
||||
// _ => println!("Target '{}' undefined from {}", &tsk.target, cloud.env.config_path),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
//let tsksrvcs_path = format!("{}/home/{}/tsksrvcs.yaml",&cloud.env.path,&source);
|
||||
//let mut tsksrvcs_data = fs::read_to_string(&tsksrvcs_path).with_context(|| format!("Failed to read 'tsksrvcs_path' from {}", &tsksrvcs_path))?;
|
||||
//let mut tsksrvcs: TskSrvcs = serde_yaml::from_str(&tsksrvcs_data)?; // .with_context(|| format!("Failed to parse 'tsksrvcs_path' from {}", &tsksrvcs_path))?;
|
||||
|
||||
run_tsksrvcs_on_providers(provider, &cfg_data, &cloud, tsksrvc, &cmd, &nxt, &cfg).await?;
|
||||
// .map_err(|e| AppError::ErrorInternalServerError(format!("Template error {}",e)))?;
|
||||
|
||||
//utils::tera_render(tera: &mut Tera, tpl_context: tera::Context, tpl: &str, output_path: &str, append: bool).await?;
|
||||
// dbg!(&cloud);
|
||||
// for (i, elem) in config_res.servers.iter().enumerate() {
|
||||
// dbg!(&elem);
|
||||
// }
|
||||
// // println!("DONE: {} - {}",&tsksrvc,&source);
|
||||
// if &envmnt::get_or("WEB_MODE", "") == "" {
|
||||
// // println!("{}",&str_config_res);
|
||||
// println!("DONE: {} ",&cfg_path);
|
||||
// }
|
||||
Ok("done".to_string())
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn get_packages_serde_value(src: &Option<HashMap<String,HashMap<String,serde_yaml::Value>>>, key: &str, target: &str, dflt: String) -> String {
|
||||
let mut target_value= String::from("");
|
||||
if let Some(pkgs) = &src {
|
||||
if let Some(val) = pkgs.get(key) {
|
||||
if let Some(item) = val.get(target) {
|
||||
dbg!(item);
|
||||
match item.as_str() {
|
||||
Some(v) => target_value=v.to_string(),
|
||||
None => target_value=dflt.to_owned(),
|
||||
}
|
||||
// target_value = tkdr::tera_lib::get_yaml_val(item,dflt);
|
||||
};
|
||||
}
|
||||
}
|
||||
target_value
|
||||
}
|
||||
|
||||
pub async fn create_full_config(cloud: &mut Cloud) -> Result<()> {
|
||||
let mut output_path = format!("{}/specs",&cloud.env.provision);
|
||||
if ! Path::new(&output_path).exists() {
|
||||
fs::create_dir(&output_path)?;
|
||||
println!("{} created", &output_path);
|
||||
}
|
||||
output_path = format!("{}/specs/config.yaml",&cloud.env.provision);
|
||||
if Path::new(&output_path).exists() && cloud.env.force < 1u8 {
|
||||
println!("Found created {}", &output_path);
|
||||
return Ok(());
|
||||
}
|
||||
let mut config_resources = cloud.config_resources.to_owned();
|
||||
config_resources.servers = Vec::new();
|
||||
let mut file =std::fs::File::create(&output_path).with_context(|| format!("Failed to open to file: {}",&output_path))?;
|
||||
let mut str_config_resources = serde_yaml::to_string(&config_resources).with_context(|| format!("Failed creating yaml from config_resources in {}",&cloud.env.provision))?;
|
||||
str_config_resources = str_config_resources.replace("servers: []","servers: \n");
|
||||
file.write(&str_config_resources.as_bytes())?;
|
||||
// fs.write apped to fs::write!(&output_path);
|
||||
/*
|
||||
for (i, elem) in cloud.config_resources.servers.iter().enumerate() {
|
||||
if let Some(tpl) = &elem.tpl {
|
||||
let tpl_path = get_env_path("",&tpl, &cloud.env.source , &cloud.env.tpls_path,true).await?;
|
||||
let tpl_content= fs::read_to_string(&tpl_path).with_context(|| format!("Failed to read 'tpl_path' from {}", &tpl_path))?;
|
||||
// println!("Template {} -> {}",&tpl, &tpl_path);
|
||||
utils::tpl_data_server(&elem, &tpl_content, &output_path, true).await.with_context(|| format!("Failed 'ConfigResources' template {}",&tpl_path))?;
|
||||
if let Some(specs) = &elem.spec {
|
||||
if let Some(pkgs_tpl) = &specs.tplPkgs {
|
||||
let pkgs_tpl_path = get_env_path("",&pkgs_tpl, &cloud.env.source , &cloud.env.tpls_path, true).await?;
|
||||
let pkgs_tpl_content= fs::read_to_string(&pkgs_tpl_path).with_context(|| format!("Failed to read 'pkgs_tpl_path' from {}", &pkgs_tpl_path))?;
|
||||
utils::tpl_data_map(&cloud.env.pkgs_vers, &pkgs_tpl_content, &output_path, true).await.with_context(|| format!("Failed render 'versions' on template {}",&pkgs_tpl_path))?;
|
||||
// let mut file = OpenOptions::new().append(true).open(&output_path)?;
|
||||
// file.write(&pkgs_tpl_content.as_bytes())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
println!("Full 'config_resources' created");
|
||||
Ok(())
|
||||
}
|
||||
/// On_cloud_servers run tsksrvcs
|
||||
pub async fn on_cloud_server(cloud: &mut Cloud, pos: usize, tsksrvc: &TskSrvc) -> Result<()> {
|
||||
let mut tsksrvc_path = String::from("");
|
||||
if tsksrvc.path.len() > 0 {
|
||||
tsksrvc_path = get_env_path(format!("CLOUDS_TSKSRVC_{}",&tsksrvc.name).as_str(), &tsksrvc.path, "" , &cloud.env.root_tsksrvcs,true).await?;
|
||||
}
|
||||
match tsksrvc.name.as_str() {
|
||||
"createserver" | "modifyip" | "startserver" => {
|
||||
for (i, elem) in cloud.config_resources.servers.iter().enumerate() {
|
||||
println!("TskSrvc {}th {} in {}th {}: {} ", pos+1, &tsksrvc.name,i+1, &tsksrvc.target, &elem.hostname);
|
||||
println!("{}",&tsksrvc_path);
|
||||
if let Some(provider) = &cloud.config_resources.provider {
|
||||
match provider.as_str() {
|
||||
"upcloud" => if let Some(provider_def) = cloud.providers.get(provider) {
|
||||
let cmd = format!("{} {} {}",&provider_def.runner,&provider_def.args, &tsksrvc.name);
|
||||
println!("Provider '{}' to create '{} -f {}/config_resources.yaml",provider,&cmd, &cloud.env.provision);
|
||||
},
|
||||
"manual" => {
|
||||
println!("Provider '{}' create manually",&provider);
|
||||
if tsksrvc.path.len() > 0 {
|
||||
tkdr::utils::run_command(&tsksrvc.path,"","")?;
|
||||
}
|
||||
},
|
||||
_ => println!("Provider '{}' to create '{}' undefined",&provider,&elem.hostname),
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => println!("TskSrvc '{}' undefined on {}", &tsksrvc.name, &tsksrvc.target),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
*/
|
||||
73
src/cmds.rs
Normal file
73
src/cmds.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
// use openssh::{Session, KnownHosts};
|
||||
use openssh::*;
|
||||
use std::io;
|
||||
use std::process::Stdio;
|
||||
use tokio::io::{AsyncWriteExt,AsyncReadExt};
|
||||
|
||||
/// Run a remote operation over SSH against `addr` (`ssh://user@host:port`).
///
/// `tsksrvc` selects the operation:
/// * `"ssh"`        — run command `trgt` remotely with `data` as its argument
/// * `"scp_to"`     — write `data` to remote file `trgt` (overwrite)
/// * `"scp_to_add"` — append `data` to remote file `trgt`
/// * `"scp_from"`   — read remote file `trgt` into `data` (mutated in place)
///
/// ```text
/// // Prepare address
/// let host= String::from("hostname-or-ip");
/// let user= String::from("root");
/// let port: u16 = 22;
/// let addr = format!("ssh://{}@{}:{}",&user,&host,port);
///
/// // for scp_to_add data content into /tmp/hola
/// let tsksrvc= String::from("scp_to_add");
/// let trgt=String::from("/tmp/hola");
/// let mut data = String::from("Dime \n");
///
/// // for ssh ls /tmp
/// let tsksrvc= String::from("ssh");
/// let trgt=String::from("ls");
/// let mut data = String::from("/tmp");
///
/// // Call command and "match" result
/// match cmds::ssh(&tsksrvc, &addr, &trgt, &mut data) {
///     Ok(rs) => println!("ssh res: {:?} -> {:?}", rs, &data),
///     Err(e) => println!("ssh error: {:?}", e),
/// }
/// ```
// #[tokio::main]
pub async fn ssh(tsksrvc: &str, addr: &str, trgt: &str, data: &mut String ) -> anyhow::Result<()> {

    // KnownHosts::Strict: connection fails unless the host key is already known.
    let session = Session::connect(&addr,KnownHosts::Strict).await?;

    if tsksrvc == "ssh" {
        // Remote command execution; stdout is printed, stderr/status ignored.
        let ls = session.command(trgt).arg(data).output().await?;
        match String::from_utf8(ls.stdout) {
            Ok(res) => println!("ls : {:?}",&res),
            Err(e) => println!("Error {:?}",e),
        };
    } else {
        // Every other tsksrvc is a file transfer over SFTP.
        let mut sftp = session.sftp();
        match tsksrvc {
            "scp_to" => {
                // Truncate-and-write `data` to the remote path.
                let mut w = sftp.write_to(trgt).await?;
                let content = data.as_bytes();
                w.write_all(content).await?;
                w.close().await?;
            },
            "scp_to_add" => {
                // Append `data` to the remote path.
                let mut w = sftp.append_to(trgt).await?;
                let content = data.as_bytes();
                w.write_all(content).await?;
                w.close().await?;
            },
            "scp_from" => {
                // Read the remote file back into the caller's buffer.
                let mut r = sftp.read_from(trgt).await?;
                r.read_to_string(data).await?;
                // println!("source: {:?}",&data);
                r.close().await?;
            },
            _ => println!("Undefined {:?}",&tsksrvc),
        };
    }
    session.close().await?;
    Ok(())
}
|
||||
// println!("SSH error no KeyPair found");
|
||||
// .map_err(|e| {
|
||||
// debug!("e = {:?}", e);
|
||||
// Error::SendError
|
||||
// })?;
|
||||
15
src/defs.rs
Normal file
15
src/defs.rs
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
|
||||
|
||||
pub type BxDynResult<T> = std::result::Result<T, Box<dyn std::error::Error>>;
|
||||
|
||||
pub const KEY_PATH: &str = ".k";
|
||||
use key_of_life::get_key;
|
||||
|
||||
/// Load the encryption key from the path named by the `KEY_PATH` environment
/// variable, falling back to the compiled-in default (`".k"`).
///
/// Never returns an empty key: when no key material can be loaded the whole
/// process is terminated with a non-zero exit status.
pub async fn load_key() -> String {
    let key_path = envmnt::get_or("KEY_PATH", KEY_PATH);
    // NOTE(review): assumes get_key returns "" when the key file is missing or
    // unreadable (inferred from the check below) — confirm in key_of_life.
    let key = get_key(&key_path,None).await;
    if key.is_empty() {
        // Abort: nothing downstream can work without key material.
        std::process::exit(0x0100);
    }
    key
}
|
||||
89
src/dsc.rs
Normal file
89
src/dsc.rs
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
use tokio::io::{AsyncWriteExt,AsyncReadExt};
|
||||
/*
|
||||
|
||||
format as json with: { nm="name",v="version",h="hash",ctx="",ath="Auth",p="perms",rqmt="requiremtes",rqst="request",...,cod=1,cry=1, data }
|
||||
data is a dhall content: if cod = 1 is encoded, cry = 1 is encrypted
|
||||
|
||||
data:
|
||||
- about
|
||||
- request
|
||||
- tsksrvcs
|
||||
- config
|
||||
- tpls
|
||||
- lang
|
||||
- defs
|
||||
|
||||
|
||||
Header
|
||||
- Read Header
|
||||
- Create Header
|
||||
Pack
|
||||
- UnPack Data
|
||||
- Pack Data
|
||||
|
||||
Run
|
||||
- Request
|
||||
- TskSrvc
|
||||
|
||||
Create LDSC
|
||||
|
||||
Attach Data - CRUD
|
||||
|
||||
|
||||
|
||||
if path.ends_with(".dhall") { // As source dhall
|
||||
|
||||
} else if path.ends_with(".dhallb") { // As encoded dhallb
|
||||
|
||||
} else if path.ends_with(".dhllb") { // As base64 + encrypt + encoded dhllb
|
||||
|
||||
} else { // As base64 + encrypt dhll or no extension
|
||||
|
||||
}
|
||||
*/
|
||||
|
||||
// NOTE(review): `#[tokio::main]` on a non-`main` async fn rewrites it into a
// *blocking* function that builds a fresh runtime per call; calling it from
// inside an already-running tokio runtime will panic — confirm this is intended.
#[tokio::main]
pub async fn dsc(tsksrvc: &str, addr: &str, trgt: &str, data: &mut String ) -> anyhow::Result<()> {
    // The whole SSH/SFTP implementation below is currently disabled; as it
    // stands this function is a stub that always returns Ok(()).
    /*
    let session = Session::connect(&addr,KnownHosts::Strict).await?;

    if tsksrvc == "ssh" {
        let ls = session.command(trgt).arg(data).output().await?;
        match String::from_utf8(ls.stdout) {
            Ok(res) => println!("ls : {:?}",&res),
            Err(e) => println!("Error {:?}",e),
        };
    } else {
        let mut sftp = session.sftp();
        match tsksrvc {
            "scp_to" => {
                let mut w = sftp.write_to(trgt).await?;
                let content = data.as_bytes();
                w.write_all(content).await?;
                w.close().await?;
            },
            "scp_to_add" => {
                let mut w = sftp.append_to(trgt).await?;
                let content = data.as_bytes();
                w.write_all(content).await?;
                w.close().await?;
            },
            "scp_from" => {
                let mut r = sftp.read_from(trgt).await?;
                r.read_to_string(data).await?;
                // println!("source: {:?}",&data);
                r.close().await?;
            },
            _ => println!("Undefined {:?}",&tsksrvc),
        }
    }

    session.close().await?;
    */
    Ok(())
}
|
||||
// println!("SSH error no KeyPair found");
|
||||
// .map_err(|e| {
|
||||
// debug!("e = {:?}", e);
|
||||
// Error::SendError
|
||||
// })?;
|
||||
147
src/errors.rs
Normal file
147
src/errors.rs
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
/// Main errors definition
|
||||
//
|
||||
// Copyright 2020, Jesús Pérez Lorenzo
|
||||
//
|
||||
use failure::Fail;
|
||||
// use diesel::result::DatabaseErrorKind::UniqueViolation;
|
||||
// use diesel::result::Error::{DatabaseError, NotFound};
|
||||
use std::fmt;
|
||||
|
||||
/// `AppError` — the application's error kinds, with user-facing messages
/// provided through [`fmt::Display`].
#[derive(Debug)]
pub enum AppError {
    /// Raised when no valid token is found
    NoValidToken,
    NoValidSession,
    SSLModeError,
    RunningModeError,
    UndefinedCollection,
    RecordAlreadyExists,
    RecordNotFound,
    DatabaseError,
    NoDataStorePool,
    NoAppEnvLoaded,
    NoCertsLoaded,
    SqlDeleteError,
    HasherError,
    MailSendError(String),
    MailError,
    // DatabaseError(diesel::result::Error),
    OperationCanceled,
}

#[allow(clippy::pattern_type_mismatch)]
impl fmt::Display for AppError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only MailSendError carries a payload and needs formatting; every
        // other variant maps to a fixed message string.
        let msg = match self {
            AppError::MailSendError(e) => return write!(f, "Mail send error: {:?}", e),
            AppError::NoValidToken => "No valid token found",
            AppError::NoValidSession => "No valid session found",
            AppError::SSLModeError => "SSL Mode error",
            AppError::RunningModeError => "No valid run mode",
            AppError::UndefinedCollection => "Collection undefined",
            AppError::RecordAlreadyExists => "This record violates a unique constraint",
            AppError::RecordNotFound => "This record does not exist",
            AppError::NoDataStorePool => "No data store pool",
            AppError::NoAppEnvLoaded => "Application environment not loaded.\nReview APP_CONFIG_PATH and config.toml content ",
            AppError::NoCertsLoaded => "Certifcations not loaded. Review APP_CONFIG_PATH certs_store_path",
            AppError::SqlDeleteError => "Sql Delete error",
            AppError::MailError => "Mail error ",
            AppError::DatabaseError => "Database error ",
            AppError::HasherError => "Hasher error ",
            AppError::OperationCanceled => "The running operation was canceled",
        };
        f.write_str(msg)
    }
}
|
||||
/*
|
||||
/*
|
||||
impl From<diesel::result::Error> for AppError {
|
||||
fn from(e: diesel::result::Error) -> Self {
|
||||
match e {
|
||||
// DatabaseError(UniqueViolation, _) => AppError::RecordAlreadyExists,
|
||||
NotFound => AppError::RecordNotFound,
|
||||
_ => AppError::DatabaseError(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
impl From<BlockingError<AppError>> for AppError {
|
||||
fn from(e: BlockingError<AppError>) -> Self {
|
||||
match e {
|
||||
BlockingError::Error(inner) => inner,
|
||||
BlockingError::Canceled => AppError::OperationCanceled,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ErrorResponse {
|
||||
err: String,
|
||||
}
|
||||
|
||||
impl actix_web::ResponseError for AppError {
|
||||
fn error_response(&self) -> HttpResponse {
|
||||
let err = format!("{}", self);
|
||||
let mut builder = match self {
|
||||
AppError::RecordAlreadyExists => HttpResponse::BadRequest(),
|
||||
AppError::RecordNotFound => HttpResponse::NotFound(),
|
||||
_ => HttpResponse::InternalServerError(),
|
||||
};
|
||||
builder.json(ErrorResponse { err })
|
||||
}
|
||||
|
||||
// fn render_response(&self) -> HttpResponse {
|
||||
// self.error_response()
|
||||
// }
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum AppCertificateError {
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
BadFile(String, String),
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
FileReadError(String, String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum TokenErrors {
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
TokenEncodingFailed(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
TokenDecodingFailed(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
MissingServerRefreshToken(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
InvalidServerRefreshToken(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
InvalidClientAuthenticationToken(String, String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum LoginFailed {
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
MissingPassword(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
InvalidPassword(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
InvalidTokenOwner(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
PasswordHashingFailed(String, String),
|
||||
|
||||
#[fail(display = "{} - Reason: {}", 0, 1)]
|
||||
PasswordVerificationFailed(String, String),
|
||||
|
||||
}
|
||||
*/
|
||||
54
src/helpers.rs
Normal file
54
src/helpers.rs
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
/*! Functions and templates which can be imported by `app.rs` to save effort */
|
||||
// Copyright 2017-2019, Stephan Sokolow
|
||||
|
||||
use structopt::{clap, StructOpt};
|
||||
|
||||
/// Modified version of Clap's default template for proper help2man compatibility
///
/// Used as a workaround for:
/// 1. Clap's default template interfering with `help2man`'s proper function
///    ([clap-rs/clap/#1432](https://github.com/clap-rs/clap/issues/1432))
/// 2. Workarounds involving injecting `\n` into the description breaking help output if used
///    on subcommand descriptions.
///
/// `{bin}`, `{version}`, `{about}`, `{usage}` and `{all-args}` are substituted
/// by Clap when the help text is generated.
pub const HELP_TEMPLATE: &str = "{bin} {version}

{about}

TskSrvcs:
- Create a Random Key
- Hash content Blake3
- Encrypt/Decrypt XChaCha20Poly1305 and random nonce
- Tera template with json-data

USAGE:
{usage}


{all-args}
";
||||
|
||||
// Shared CLI boilerplate flags, flattened into `CliOpts` in app.rs. The `///`
// field comments below are emitted verbatim as `--help` text and are left as-is.
#[allow(clippy::missing_docs_in_private_items)]
// Can't doc-comment until TeXitoi/structopt#333
// Options used by boilerplate code in `main.rs`
//
// FIXME: Report that StructOpt trips Clippy's `cast_possible_truncation` lint unless I use
// `u64` for my `from_occurrences` inputs, which is a ridiculous state of things.
#[derive(StructOpt, Debug)]
#[structopt(rename_all = "kebab-case")]
pub struct BoilerplateOpts {
    /// Decrease verbosity (-q, -qq, -qqq, etc.)
    #[structopt(short, long, parse(from_occurrences))]
    pub quiet: u64,

    /// Increase verbosity (-v, -vv, -vvv, etc.)
    #[structopt(short, long, parse(from_occurrences))]
    pub verbose: u64,

    // Parsed by stderrlog from the string given on the command line.
    /// Display timestamps on log messages (sec, ms, ns, none)
    #[structopt(long, value_name = "resolution")]
    pub timestamp: Option<stderrlog::Timestamp>,

    /// Write a completion definition for the specified shell to stdout (bash, zsh, etc.)
    #[structopt(long, value_name = "shell")]
    pub dump_completions: Option<clap::Shell>,
}
||||
148
src/main.rs
Normal file
148
src/main.rs
Normal file
|
|
@ -0,0 +1,148 @@
|
|||
/*! TODO: Application description here
|
||||
|
||||
This file provided by [rust-cli-boilerplate](https://github.com/ssokolow/rust-cli-boilerplate)
|
||||
*/
|
||||
// Copyright 2017-2021, Stephan Sokolow
|
||||
|
||||
// Make rustc's built-in lints more strict and set clippy into a whitelist-based configuration so
|
||||
// we see new lints as they get written, then opt out of ones we have seen and don't want
|
||||
#![warn(warnings, rust_2018_idioms)]
|
||||
#![warn(clippy::all, clippy::pedantic, clippy::restriction)]
|
||||
#![allow(clippy::float_arithmetic, clippy::implicit_return, clippy::needless_return)]
|
||||
#![forbid(unsafe_code)] // Enforce my policy of only allowing it in my own code as a last resort
|
||||
|
||||
#![allow(warnings)]
|
||||
#![allow(unused_imports)]
|
||||
|
||||
// stdlib imports
|
||||
//use std::{any, convert::TryInto};
|
||||
use std::{convert::TryInto};
|
||||
use std::io;
|
||||
|
||||
// 3rd-party imports
|
||||
use anyhow::{Context, Result};
|
||||
use structopt::{clap, StructOpt};
|
||||
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
use tkdr::crypt_lib::{encrypt, decrypt};
|
||||
use tkdr::tera_lib::{hash_from_data, data_templated, hash_content};
|
||||
use tkdr::randomkey::{RandomKey};
|
||||
use key_of_life::get_key;
|
||||
|
||||
// use log::{debug, error, info, trace, warn};
|
||||
// use actix_web::{guard, middleware, web, App, HttpServer};
|
||||
use dotenv::dotenv;
|
||||
use tokio::fs::File;
|
||||
use tokio_util::codec::{BytesCodec, FramedRead};
|
||||
use hyper::service::{make_service_fn, service_fn};
|
||||
use hyper::{Body, Method, Request, Response, Server, StatusCode};
|
||||
|
||||
use crate::defs::{BxDynResult};
|
||||
|
||||
/// The verbosity level when no `-q` or `-v` arguments are given, with `0` being `-q`
|
||||
pub const DEFAULT_VERBOSITY: u64 = 1;
|
||||
|
||||
/// Default KEY PATH
|
||||
pub const KEY_PATH: &str = ".k";
|
||||
// Local imports
|
||||
mod app;
|
||||
mod helpers;
|
||||
mod cmds;
|
||||
// mod dsc;
|
||||
mod defs;
|
||||
mod utils;
|
||||
mod errors;
|
||||
mod clouds;
|
||||
mod web;
|
||||
|
||||
/// cmd main
|
||||
// async fn cmd_main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
async fn cmd_main() -> BxDynResult<String> {
|
||||
// Parse command-line arguments (exiting on parse error, --version, or --help)
|
||||
let opts = app::CliOpts::from_args();
|
||||
|
||||
// Configure logging output so that -q is "decrease verbosity" rather than instant silence
|
||||
let verbosity = opts
|
||||
.boilerplate
|
||||
.verbose
|
||||
.saturating_add(app::DEFAULT_VERBOSITY)
|
||||
.saturating_sub(opts.boilerplate.quiet);
|
||||
|
||||
stderrlog::new()
|
||||
.module(module_path!())
|
||||
.quiet(verbosity == 0)
|
||||
.verbosity(verbosity.saturating_sub(1).try_into().context("Verbosity too high")?)
|
||||
.timestamp(opts.boilerplate.timestamp.unwrap_or(stderrlog::Timestamp::Off))
|
||||
.init()
|
||||
.context("Failed to initialize logging output")?;
|
||||
// If requested, generate shell completions and then exit with status of "success"
|
||||
opts.boilerplate.dump_completions.map_or(app::main(opts).await, |shell| {
|
||||
app::CliOpts::clap().gen_completions_to(
|
||||
app::CliOpts::clap().get_bin_name().unwrap_or_else(|| clap::crate_name!()),
|
||||
shell,
|
||||
&mut io::stdout(),
|
||||
);
|
||||
Ok(String::from(""))
|
||||
})
|
||||
// if let Some(shell) = opts.boilerplate.dump_completions {
|
||||
// app::CliOpts::clap().gen_completions_to(
|
||||
// app::CliOpts::clap().get_bin_name().unwrap_or_else(|| clap::crate_name!()),
|
||||
// shell,
|
||||
// &mut io::stdout(),
|
||||
// );
|
||||
// Ok(())
|
||||
// } else {
|
||||
// // Run the actual `main` and rely on `impl Termination` to provide a simple, concise way to
|
||||
// // allow terminal errors that can be changed later as needed but starts out analogous to
|
||||
// // letting an unhandled exception bubble up in something like Python.
|
||||
// // TODO: Experiment with this and look for ways to polish it up further
|
||||
// app::main(opts)
|
||||
// }
|
||||
}
|
||||
/// Boilerplate to parse command-line arguments, set up logging, and handle bubbled-up `Error`s.
|
||||
///
|
||||
/// See `app::main` for the application-specific logic.
|
||||
// #[actix_rt::main]
|
||||
// async fn main() -> std::io::Result<()> { // Result<()> {
|
||||
#[tokio::main]
|
||||
async fn main() -> std::io::Result<()> { // Result<()> {
|
||||
// // println!("|{}|",&key);
|
||||
// let key_path = &envmnt::get_or("KEY_PATH", KEY_PATH);
|
||||
// let key = get_key(&key_path,None).await;
|
||||
// // let key = get_key(KEY_PATH,None).await;
|
||||
// if key.is_empty() {
|
||||
// std::process::exit(0x0100);
|
||||
// }
|
||||
// cmd_main().await;
|
||||
|
||||
let web_mode = &envmnt::get_or("WEB_MODE", "");
|
||||
if web_mode == "" {
|
||||
cmd_main().await;
|
||||
} else {
|
||||
let opts = app::CliOpts::from_args();
|
||||
// dbg!(opts);
|
||||
match app::main(opts).await {
|
||||
Ok(res) => println!("{}",res),
|
||||
Err(e) => println!("Errror: {}",e),
|
||||
}
|
||||
pretty_env_logger::init();
|
||||
|
||||
let addr = "127.0.0.1:1337".parse().unwrap();
|
||||
|
||||
let make_service =
|
||||
make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(web::response_examples)) });
|
||||
|
||||
let server = Server::bind(&addr).serve(make_service);
|
||||
|
||||
println!("Listening on http://{}", addr);
|
||||
|
||||
if let Err(e) = server.await {
|
||||
eprintln!("server error: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
// vim: set sw=4 sts=4 expandtab :
|
||||
30
src/routes.rs
Normal file
30
src/routes.rs
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
/// Router modules loader
|
||||
//
|
||||
// Copyright 2021, Jesús Pérez Lorenzo
|
||||
//
|
||||
// use crate::errors::AppError;
|
||||
// use actix_web::HttpResponse;
|
||||
/// Main routes
|
||||
// pub(super) mod base;
|
||||
pub(super) mod main_gets;
|
||||
// pub(super) mod main_posts;
|
||||
// pub(super) mod adm_gets;
|
||||
// pub(super) mod adm_posts;
|
||||
// // pub(super) mod principal;
|
||||
// /// Auth routes login, tokens, etc
|
||||
// pub(super) mod gql;
|
||||
// // pub(super) mod auth;
|
||||
// pub(super) mod files;
|
||||
// pub(super) mod utils;
|
||||
// pub(super) mod reqtsksrvcs;
|
||||
/*
|
||||
fn convert<T, E>(res: Result<T, E>) -> Result<HttpResponse, AppError>
|
||||
where
|
||||
T: serde::Serialize,
|
||||
AppError: From<E>,
|
||||
{
|
||||
res.map(|d| HttpResponse::Ok().json(d))
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
*/
|
||||
127
src/utils.rs
Normal file
127
src/utils.rs
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
use std::fs; //, io};
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::collections::HashMap;
|
||||
use tera::Tera;
|
||||
|
||||
use anyhow::{anyhow,Result,Context,Error};
|
||||
|
||||
use clds::clouds::defs::{ConfigResources,Server};
|
||||
use clds::pkgs::{Packages};
|
||||
|
||||
|
||||
/// Strip a single trailing newline from `s` in place.
///
/// Removes one trailing `'\n'` and, if that leaves a trailing `'\r'`, removes it too —
/// i.e. handles both Unix (`\n`) and Windows (`\r\n`) line endings.  A lone trailing
/// `'\r'` without a `'\n'` is left untouched, as is everything else.
pub fn trim_newline(s: &mut String) {
    if !s.ends_with('\n') {
        return;
    }
    // '\n' is a single byte, so truncating by 1 is safe on UTF-8 boundaries.
    s.truncate(s.len() - 1);
    if s.ends_with('\r') {
        s.truncate(s.len() - 1);
    }
}
|
||||
|
||||
/// render Tera
|
||||
///
|
||||
pub async fn tera_render(tera: &mut Tera, tpl_context: tera::Context, tpl: &str, output_path: &str, append: bool) -> Result<()> {
|
||||
let mut all_tpls = vec![("data-template.html",tpl)];
|
||||
// match read_path_file(&path, &template_head, "content") {
|
||||
// Ok(tpl_head) => all_tpls.push((&template_head,tpl_head)),
|
||||
// Err(_) => {} // ignore if no header
|
||||
// }
|
||||
tera.add_raw_templates(all_tpls);
|
||||
match tera.render("data-template.html", &tpl_context) {
|
||||
Ok(mut res) => if append && Path::new(&output_path).exists() {
|
||||
let mut file = OpenOptions::new().append(true).open(&output_path)?;
|
||||
res = res.replace("/", "/");
|
||||
// println!("{}",&res);
|
||||
file.write(res.as_bytes())?;
|
||||
} else {
|
||||
// fs::write(&output_path, res)?;
|
||||
let mut file = OpenOptions::new().write(true).truncate(true).open(&output_path)?;
|
||||
file.write_all(&res.as_bytes())?;
|
||||
},
|
||||
Err(e) => {
|
||||
println!("Error 'tera_render': {}",e);
|
||||
// return Err(anyhow!("Error tera render: {}", e));
|
||||
},
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// tpl_data
|
||||
pub async fn tpl_data_map(config: &HashMap<String, String>, tpl: &str, output_path: &str, append: bool) -> Result<()> {
|
||||
let mut tera = Tera::default();
|
||||
match tera::Context::from_serialize(&config) {
|
||||
Ok(tpl_context) => {
|
||||
// tpl_context.contains_key(index);
|
||||
// tpl_context.insert(index);
|
||||
tera_render(&mut tera, tpl_context, tpl, output_path, append).await.with_context(|| format!("Failed to render 'data_map' {}", &tpl))?;
|
||||
},
|
||||
Err(e) => return Err(anyhow!("Error tera context 'data_map' serialize: {:?}", e)),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// tpl_data
|
||||
pub async fn tpl_data(config: &ConfigResources, tpl: &str, output_path: &str, append: bool) -> Result<()> {
|
||||
let mut tera = tera::Tera::default();
|
||||
// let template_head = "header.html";
|
||||
//let tpl = read_path_file(&path, &template_name, "content")?;
|
||||
match tera::Context::from_serialize(config) {
|
||||
Ok(tpl_context) => {
|
||||
// tpl_context.contains_key(index);
|
||||
// tpl_context.insert(index);
|
||||
tera_render(&mut tera, tpl_context, tpl, output_path, append).await.with_context(|| format!("Failed to render 'data' {}", &tpl))?;
|
||||
},
|
||||
Err(e) => return Err(anyhow!("Error tera context 'data' serialize: {:?}", e)),
|
||||
}
|
||||
|
||||
// Ok(res)
|
||||
|
||||
// let mut data_hash: HashMap<String, String> = HashMap::new();
|
||||
// let mut tpl_context = tera::Context::new();
|
||||
// // println!("data: {} template: {}",data_file,tpl_file);
|
||||
// match hash_from_data(data_file, &mut tpl_context, &mut data_hash, true) {
|
||||
// Ok(_) => {
|
||||
// match data_templated(&"".to_string(),tpl_file,&mut tpl_context, &data_hash) {
|
||||
// Ok(result) => println!("{}",result),
|
||||
// Err(e) => println!("Error with {} data in {} template: {}", data_file, tpl_file, e),
|
||||
// }
|
||||
// },
|
||||
// Err(e) => println!("Error with {} data in {} template: {}", data_file, tpl_file, e),
|
||||
// }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// tpl_data
|
||||
pub async fn tpl_data_server(config: &Server, tpl: &str, output_path: &str, append: bool) -> Result<()> {
|
||||
// if let Some(servers) = &config.servers {
|
||||
let mut tera = tera::Tera::default();
|
||||
match tera::Context::from_serialize(&config) {
|
||||
Ok(tpl_context) => {
|
||||
// tpl_context.contains_key(index);
|
||||
// tpl_context.insert(index);
|
||||
tera_render(&mut tera, tpl_context, tpl, output_path, append).await.with_context(|| format!("Failed to render 'data_server' {}:", &tpl))?;
|
||||
},
|
||||
Err(e) => return Err(anyhow!("Error tera context 'data_server' serialize: {:?}", e)),
|
||||
}
|
||||
// }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
/// yaml
|
||||
pub fn load_yaml_config(path: String) -> Result<ConfigResources> { // }, serde_yaml::Error> {
|
||||
// let mut data= Config::default();
|
||||
let content=fs::read_to_string(path)?;
|
||||
let data: ConfigResources = serde_yaml::from_str(&content)?;
|
||||
// println!("{:#?}",data);
|
||||
Ok(data)
|
||||
}
|
||||
pub fn load_yaml_packages(path: String) -> Result<Packages> { // }, serde_yaml::Error> {
|
||||
// let mut data= Config::default();
|
||||
let content=fs::read_to_string(path)?;
|
||||
let data: Packages = serde_yaml::from_str(&content)?;
|
||||
// println!("{:#?}",data);
|
||||
Ok(data)
|
||||
}
|
||||
572
src/validators.rs
Normal file
572
src/validators.rs
Normal file
|
|
@ -0,0 +1,572 @@
|
|||
/*! Validator functions suitable for use with `Clap` and `StructOpt` */
|
||||
// Copyright 2017-2021, Stephan Sokolow
|
||||
|
||||
use std::ffi::OsString;
|
||||
use std::fs::File;
|
||||
use std::path::{Component, Path};
|
||||
|
||||
use faccess::PathExt;
|
||||
|
||||
/// Special filenames which cannot be used for real files under Win32
///
/// (Unless your app uses the `\\?\` path prefix to bypass legacy Win32 API compatibility
/// limitations)
///
/// **NOTE:** These are still reserved if you append an extension to them.
///
/// Sources:
/// * [Boost Path Name Portability Guide
///   ](https://www.boost.org/doc/libs/1_36_0/libs/filesystem/doc/portability_guide.htm)
/// * Wikipedia: [Filename: Comparison of filename limitations
///   ](https://en.wikipedia.org/wiki/Filename#Comparison_of_filename_limitations)
///
/// **TODO:** Decide what (if anything) to do about the NTFS "only in root directory" reservations.
// Compared case-insensitively against the file *stem* in `filename_valid_portable`
// (callers uppercase the stem first), so entries here are stored uppercase.
#[rustfmt::skip]
pub const RESERVED_DOS_FILENAMES: &[&str] = &["AUX", "CON", "NUL", "PRN",
    "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", // Serial Ports
    "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9", // Parallel Ports
    "CLOCK$", "$IDLE$", "CONFIG$", "KEYBD$", "LST", "SCREEN$"];
|
||||
|
||||
/// Test that the given path *should* be writable
|
||||
///
|
||||
/// ## Use For:
|
||||
/// * Output directories that should exist and be writable.
|
||||
///
|
||||
/// ## Relevant Conventions:
|
||||
/// * Use `-o` to specify the output path if doing so is optional. Less commonly, `-d` is also
|
||||
/// used. [[1]](http://www.catb.org/esr/writings/taoup/html/ch10s05.html)
|
||||
///
|
||||
/// ## Cautions:
|
||||
/// * Never assume a directory's permissions will remain unchanged between the time you check them
|
||||
/// and the time you attempt to use them.
|
||||
/// * Some filesystems provide sufficiently fine-grained permissions that having access to create
|
||||
/// a file does not imply having access to delete the file you've created.
|
||||
///
|
||||
/// **TODO:** A complementary validator which will verify that the closest existing ancestor is
|
||||
/// writable. (for things that will `mkdir -p` if necessary.)
|
||||
#[cfg(unix)]
|
||||
pub fn path_output_dir<P: AsRef<Path> + ?Sized>(value: &P) -> Result<(), OsString> {
|
||||
let path = value.as_ref();
|
||||
|
||||
if !path.is_dir() {
|
||||
return Err(format!("Not a directory: {}", path.display()).into());
|
||||
}
|
||||
|
||||
if path.writable() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Err(format!("Would be unable to write to destination directory: {}", path.display()).into())
|
||||
}
|
||||
|
||||
/// The given path is a file that can be opened for reading or `-` denoting `stdin`
|
||||
///
|
||||
/// ## Use For:
|
||||
/// * Input file paths
|
||||
///
|
||||
/// ## Relevant Conventions:
|
||||
/// * If specifying an input file via an option flag, use `-f` as the name of the flag.
|
||||
/// [[1]](http://www.catb.org/esr/writings/taoup/html/ch10s05.html)
|
||||
/// * Prefer taking input paths as positional arguments and, if feasible, allow an arbitrary
|
||||
/// number of input arguments. This allows easy use of shell globs.
|
||||
///
|
||||
/// **Note:** The following command-lines, which interleave files and `stdin`, are a good test of
|
||||
/// how the above conventions should interact:
|
||||
///
|
||||
/// data_source | my_utility_a header.dat - footer.dat > output.dat
|
||||
/// data_source | my_utility_b -f header.dat -f - -f footer.dat > output.dat
|
||||
///
|
||||
/// ## Cautions:
|
||||
/// * If the value is not `-`, this will momentarily open the given path for reading to verify
|
||||
/// that it is readable. However, relying on this to remain true will introduce a race
|
||||
/// condition. This validator is intended only to allow your program to exit as quickly as
|
||||
/// possible in the case of obviously bad input.
|
||||
/// * As a more reliable validity check, you are advised to open a handle to the file in question
|
||||
/// as early in your program's operation as possible, use it for all your interactions with the
|
||||
/// file, and keep it open until you are finished. This will both verify its validity and
|
||||
/// minimize the window in which another process could render the path invalid.
|
||||
#[rustfmt::skip]
|
||||
pub fn path_readable_file_or_stdin<P: AsRef<Path> + ?Sized>(value: &P)
|
||||
-> std::result::Result<(), OsString> {
|
||||
if value.as_ref().to_string_lossy() == "-" {
|
||||
return Ok(())
|
||||
}
|
||||
|
||||
path_readable_file(value)
|
||||
}
|
||||
|
||||
/// The given path is a file that can be opened for reading
///
/// ## Use For:
/// * Input file paths.
///
/// ## Relevant Conventions:
/// * **Prefer [`path_readable_file_or_stdin`](fn.path_readable_file_or_stdin.html).**
///   Commands should support taking input via `stdin` whenever feasible.
/// * If specifying an input file via an option flag, use `-f` as the name of the flag.
///   [[1]](http://www.catb.org/esr/writings/taoup/html/ch10s05.html)
///
/// ## Cautions:
/// * This momentarily opens the given path for reading to verify readability; relying on
///   that staying true is a race condition. Use this only to fail fast on obviously bad
///   input, and open a long-lived handle as early as possible for real work.
#[rustfmt::skip]
pub fn path_readable_file<P: AsRef<Path> + ?Sized>(value: &P)
        -> std::result::Result<(), OsString> {
    let path = value.as_ref();

    // Directories are readable too, so rule them out explicitly first.
    if path.is_dir() {
        return Err(format!("{}: Input path must be a file, not a directory",
            path.display()).into());
    }

    // Probe readability by actually opening the file, then immediately dropping the handle.
    match File::open(path) {
        Ok(_) => Ok(()),
        Err(e) => Err(format!("{}: {}", path.display(), e).into()),
    }
}
|
||||
|
||||
// TODO: Implement path_readable_dir and path_readable for --recurse use-cases
|
||||
|
||||
/// The given path is valid on all major filesystems and OSes
|
||||
///
|
||||
/// ## Use For:
|
||||
/// * Output file or directory paths that will be created if missing
|
||||
/// (See also [`path_output_dir`](fn.path_output_dir.html).)
|
||||
///
|
||||
/// ## Relevant Conventions:
|
||||
/// * Use `-o` to specify the output path if doing so is optional.
|
||||
/// [[1]](http://www.catb.org/esr/writings/taoup/html/ch10s05.html)
|
||||
/// [[2]](http://tldp.org/LDP/abs/html/standard-options.html)
|
||||
/// * Interpret a value of `-` to mean "Write output to stdout".
|
||||
/// [[3]](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html)
|
||||
/// * Because `-o` does not inherently indicate whether it expects a file or a directory, consider
|
||||
/// also providing a GNU-style long version with a name like `--outfile` to allow scripts which
|
||||
/// depend on your tool to be more self-documenting.
|
||||
///
|
||||
/// ## Cautions:
|
||||
/// * To ensure files can be copied/moved without issue, this validator may impose stricter
|
||||
/// restrictions on filenames than your filesystem. Do *not* use it for input paths.
|
||||
/// * Other considerations, such as paths containing symbolic links with longer target names, may
|
||||
/// still cause your system to reject paths which pass this check.
|
||||
/// * As a more reliable validity check, you are advised to open a handle to the file in question
|
||||
/// as early in your program's operation as possible and keep it open until you are finished.
|
||||
/// This will both verify its validity and minimize the window in which another process could
|
||||
/// render the path invalid.
|
||||
///
|
||||
/// ## Design Considerations:
|
||||
/// * Many popular Linux filesystems impose no total length limit.
|
||||
/// * This function imposes a 32,760-character limit for compatibility with flash drives formatted
|
||||
/// FAT32 or exFAT. [[4]](https://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits)
|
||||
/// * Some POSIX API functions, such as `getcwd()` and `realpath()` rely on the `PATH_MAX`
|
||||
/// constant, which typically specifies a length of 4096 bytes including terminal `NUL`, but
|
||||
/// this is not enforced by the filesystem itself.
|
||||
/// [[5]](https://insanecoding.blogspot.com/2007/11/pathmax-simply-isnt.html)
|
||||
///
|
||||
/// Programs which rely on libc for this functionality but do not attempt to canonicalize paths
|
||||
/// will usually work if you change the working directory and use relative paths.
|
||||
/// * The following lengths were considered too limiting to be enforced by this function:
|
||||
/// * The UDF filesystem used on DVDs imposes a 1023-byte length limit on paths.
|
||||
/// * When not using the `\\?\` prefix to disable legacy compatibility, Windows paths are
|
||||
/// limited to 260 characters, which was arrived at as `A:\MAX_FILENAME_LENGTH<NULL>`.
|
||||
/// [[6]](https://stackoverflow.com/a/1880453/435253)
|
||||
/// * ISO 9660 without Joliet or Rock Ridge extensions does not permit periods in directory
|
||||
/// names, directory trees more than 8 levels deep, or filenames longer than 32 characters.
|
||||
/// [[7]](https://www.boost.org/doc/libs/1_36_0/libs/filesystem/doc/portability_guide.htm)
|
||||
/// * See [`filename_valid_portable`](fn.filename_valid_portable.html) for design considerations
|
||||
/// relating to individual path components.
|
||||
///
|
||||
/// **TODO:** Write another function for enforcing the limits imposed by targeting optical media.
|
||||
pub fn path_valid_portable<P: AsRef<Path> + ?Sized>(value: &P) -> Result<(), OsString> {
|
||||
let path = value.as_ref();
|
||||
|
||||
#[allow(clippy::decimal_literal_representation)] // Path lengths are most intuitive as decimal
|
||||
if path.as_os_str().is_empty() {
|
||||
Err("Path is empty".into())
|
||||
} else if path.as_os_str().len() > 32760 {
|
||||
// Limit length to fit on VFAT/exFAT when using the `\\?\` prefix to disable legacy limits
|
||||
// Source: https://en.wikipedia.org/wiki/Comparison_of_file_systems
|
||||
Err(format!("Path is too long ({} chars): {:?}", path.as_os_str().len(), path).into())
|
||||
} else {
|
||||
for component in path.components() {
|
||||
if let Component::Normal(string) = component {
|
||||
filename_valid_portable(string)?
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// The string is a valid file/folder name on all major filesystems and OSes
///
/// ## Use For:
/// * Output file or directory names within a parent directory specified through other means.
///
/// ## Relevant Conventions:
/// * Most of the time, you want to let users specify a full path via [`path_valid_portable`
///   ](fn.path_valid_portable.html) instead.
///
/// ## Cautions:
/// * To ensure files can be copied/moved without issue, this validator may impose stricter
///   restrictions on filenames than your filesystem. Do *not* use it for input filenames.
/// * This validator cannot guarantee that a given filename will be valid once other
///   considerations such as overall path length limits are taken into account.
/// * As a more reliable validity check, you are advised to open a handle to the file in question
///   as early in your program's operation as possible, use it for all your interactions with the
///   file, and keep it open until you are finished. This will both verify its validity and
///   minimize the window in which another process could render the path invalid.
///
/// ## Design Considerations:
/// * In the interest of not inconveniencing users in the most common case, this validator imposes
///   a 255-character length limit.
///   [[1]](https://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits)
/// * The eCryptFS home directory encryption offered by Ubuntu Linux imposes a 143-character
///   length limit when filename encryption is enabled.
///   [[2]](https://bugs.launchpad.net/ecryptfs/+bug/344878)
/// * the Joliet extensions for ISO 9660 are specified to support only 64-character filenames and
///   tested to support either 103 or 110 characters depending whether you ask the mkisofs
///   developers or Microsoft. [[3]](https://en.wikipedia.org/wiki/Joliet_(file_system))
/// * The [POSIX Portable Filename Character Set
///   ](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_282)
///   is too restrictive to be baked into a general-purpose validator.
///
/// **TODO:** Consider converting this to a private function that just exists as a helper for the
/// path validator in favour of more specialized validators for filename patterns, prefixes, and/or
/// suffixes, to properly account for how "you can specify a name but not a path" generally
/// comes about.
pub fn filename_valid_portable<P: AsRef<Path> + ?Sized>(value: &P) -> Result<(), OsString> {
    let path = value.as_ref();

    // TODO: Should I refuse incorrect Unicode normalization since Finder doesn't like it or just
    // advise users to run a normalization pass?
    // Source: https://news.ycombinator.com/item?id=16993687

    // Check that the length is within range
    // NOTE(review): `OsStr::len` measures bytes, not characters, so multi-byte UTF-8
    // names hit this limit sooner than the message's "chars" wording suggests.
    let os_str = path.as_os_str();
    if os_str.len() > 255 {
        #[rustfmt::skip]
        return Err(format!("File/folder name is too long ({} chars): {}",
            path.as_os_str().len(), path.display()).into());
    }

    // Check for invalid characters
    // NOTE(review): despite the name, `lossy_str` comes from the *strict* `to_str()` —
    // non-UTF-8 names are rejected outright rather than lossily converted.
    let lossy_str = match os_str.to_str() {
        Some(string) => string,
        None => {
            return Err("File/folder names containing non-UTF8 characters aren't portable".into())
        }
    };
    let last_char = match lossy_str.chars().last() {
        Some(chr) => chr,
        None => return Err("File/folder name is empty".into()),
    };
    if [' ', '.'].iter().any(|&x| x == last_char) {
        // The Windows shell and UI don't support component names ending in periods or spaces
        // Source: https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file
        return Err("Windows forbids path components ending with spaces/periods".into());
    }

    // Byte-wise scan: every disallowed character below is ASCII, so checking the UTF-8
    // bytes directly cannot produce false positives on multi-byte characters.
    #[allow(clippy::match_same_arms)] // Would need to cram everything onto one arm otherwise
    if lossy_str.as_bytes().iter().any(|c| match c {
        // invalid on all APIs which don't use counted strings like inside the NT kernel
        b'\0' => true,
        // invalid under FAT*, VFAT, exFAT, and NTFS
        0x1..=0x1f | 0x7f | b'"' | b'*' | b'<' | b'>' | b'?' | b'|' => true,
        // POSIX path separator (invalid on Unixy platforms like Linux and BSD)
        b'/' => true,
        // HFS/Carbon path separator (invalid in filenames on MacOS and Mac filesystems)
        // DOS/Win32 drive separator (invalid in filenames on Windows and Windows filesystems)
        b':' => true,
        // DOS/Windows path separator (invalid in filenames on Windows and Windows filesystems)
        b'\\' => true,
        // let everything else through
        _ => false,
    }) {
        #[rustfmt::skip]
        return Err(format!("Path component contains invalid characters: {}",
            path.display()).into());
    }

    // Reserved DOS filenames that still can't be used on modern Windows for compatibility
    // The file *stem* is compared (uppercased), so "CON.txt" is rejected along with "CON".
    if let Some(file_stem) = path.file_stem() {
        let stem = file_stem.to_string_lossy().to_uppercase();
        if RESERVED_DOS_FILENAMES.iter().any(|&x| x == stem) {
            Err(format!("Filename is reserved on Windows: {:?}", file_stem).into())
        } else {
            Ok(())
        }
    } else {
        Ok(())
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::wildcard_imports, clippy::panic, clippy::result_expect_used)] // OK for tests
|
||||
|
||||
use super::*;
|
||||
use std::ffi::OsStr;
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
#[cfg(windows)]
|
||||
use std::os::windows::ffi::OsStringExt;
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
#[rustfmt::skip]
|
||||
fn path_output_dir_basic_functionality() {
|
||||
assert!(path_output_dir(OsStr::new("/")).is_err()); // Root
|
||||
assert!(path_output_dir(OsStr::new("/tmp")).is_ok()); // OK Folder
|
||||
assert!(path_output_dir(OsStr::new("/dev/null")).is_err()); // OK File
|
||||
assert!(path_output_dir(OsStr::new("/etc/shadow")).is_err()); // Denied File
|
||||
assert!(path_output_dir(OsStr::new("/etc/ssl/private")).is_err()); // Denied Folder
|
||||
assert!(path_output_dir(OsStr::new("/nonexistant_test_path")).is_err()); // Missing Path
|
||||
assert!(path_output_dir(OsStr::new("/tmp\0with\0null")).is_err()); // Invalid CString
|
||||
// TODO: Not-already-canonicalized paths (eg. relative paths)
|
||||
|
||||
assert!(path_output_dir(OsStr::from_bytes(b"/not\xffutf8")).is_err()); // Invalid UTF-8
|
||||
// TODO: Non-UTF8 path that actually does exist and is writable
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn path_output_dir_basic_functionality() {
|
||||
unimplemented!("TODO: Implement Windows version of path_output_dir");
|
||||
}
|
||||
|
||||
// ---- path_readable_file ----
|
||||
|
||||
#[test]
|
||||
fn path_readable_file_stdin_test() {
|
||||
assert!(path_readable_file(OsStr::new("-")).is_err());
|
||||
assert!(path_readable_file_or_stdin(OsStr::new("-")).is_ok());
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[test]
|
||||
#[rustfmt::skip]
|
||||
fn path_readable_file_basic_functionality() {
|
||||
for func in &[path_readable_file, path_readable_file_or_stdin] {
|
||||
// Existing paths
|
||||
assert!(func(OsStr::new("/bin/sh")).is_ok()); // OK File
|
||||
assert!(func(OsStr::new("/bin/../etc/.././bin/sh")).is_ok()); // Non-canonicalized
|
||||
assert!(func(OsStr::new("/../../../../bin/sh")).is_ok()); // Above root
|
||||
|
||||
// Inaccessible, nonexistent, or invalid paths
|
||||
assert!(func(OsStr::new("")).is_err()); // Empty String
|
||||
assert!(func(OsStr::new("/")).is_err()); // OK Folder
|
||||
assert!(func(OsStr::new("/etc/shadow")).is_err()); // Denied File
|
||||
assert!(func(OsStr::new("/etc/ssl/private")).is_err()); // Denied Folder
|
||||
assert!(func(OsStr::new("/nonexistant_test_path")).is_err()); // Missing Path
|
||||
assert!(func(OsStr::new("/null\0containing")).is_err()); // Invalid CString
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
#[test]
fn path_readable_file_basic_functionality() {
    // Placeholder: mirrors the Unix test above once suitable Windows paths exist.
    unimplemented!("TODO: Pick some appropriate equivalent test paths for Windows");
}
|
||||
|
||||
#[cfg(unix)]
#[test]
#[rustfmt::skip]
fn path_readable_file_invalid_utf8() {
    // A POSIX "bag of bytes" path that is not valid UTF-8 must be rejected
    // by both the plain and the *_or_stdin validators.
    let non_utf8 = OsStr::from_bytes(b"/not\xffutf8");
    for func in &[path_readable_file, path_readable_file_or_stdin] {
        assert!(func(non_utf8).is_err()); // Invalid UTF-8
        // TODO: Non-UTF8 path that actually IS valid
    }
}
|
||||
#[cfg(windows)]
#[test]
#[rustfmt::skip]
fn path_readable_file_unpaired_surrogates() {
    // A Windows path containing an un-paired UTF-16 surrogate after a valid
    // drive prefix must be rejected by both validators.
    for func in &[path_readable_file, path_readable_file_or_stdin] {
        // BUG FIX: this loop previously ignored `func` and always called
        // path_readable_file, so path_readable_file_or_stdin was never exercised.
        assert!(func(&OsString::from_wide(
            &['C' as u16, ':' as u16, '\\' as u16, 0xd800])).is_err());
        // TODO: Unpaired surrogate path that actually IS valid
    }
}
|
||||
|
||||
// ---- filename_valid_portable ----
|
||||
|
||||
// Filenames that every portability validator in this module must accept.
#[rustfmt::skip]
const VALID_FILENAMES: &[&str] = &[
    "-", // stdin/stdout
    "test1", "te st", ".test", // regular, space, and leading period
    "lpt", "lpt0", "lpt10", // would break if DOS reserved check is doing dumb matching
];
|
||||
|
||||
// Paths which should pass because std::path::Path will recognize the separators
// TODO: Actually run the tests on Windows to make sure they work
#[cfg(windows)]
const PATHS_WITH_NATIVE_SEPARATORS: &[&str] =
    &["re/lative", "/ab/solute", "re\\lative", "\\ab\\solute"];
// On Unix only forward slashes are path separators, so no backslash variants here.
#[cfg(unix)]
const PATHS_WITH_NATIVE_SEPARATORS: &[&str] = &["re/lative", "/ab/solute"];
|
||||
|
||||
// Paths which should fail because std::path::Path won't recognize the separators and we don't
// want them showing up in the components.
#[cfg(windows)]
const PATHS_WITH_FOREIGN_SEPARATORS: &[&str] = &["Classic Mac HD:Folder Name:File"];
// On Unix, backslashes and drive/UNC prefixes are ordinary filename bytes, not
// separators, so all the Win32 forms below count as "foreign" here.
#[cfg(unix)]
const PATHS_WITH_FOREIGN_SEPARATORS: &[&str] = &[
    "relative\\win32",
    "C:\\absolute\\win32",
    "\\drive\\relative\\win32",
    "\\\\unc\\path\\for\\win32",
    "Classic Mac HD:Folder Name:File",
];
|
||||
|
||||
// Filenames that are invalid on at least one mainstream platform/filesystem.
// Source: https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file
#[rustfmt::skip]
const INVALID_PORTABLE_FILENAMES: &[&str] = &[
    "test\x03", "test\x07", "test\x08", "test\x0B", "test\x7f", // Control characters (VFAT)
    "\"test\"", "<testsss", "testsss>", "testsss|", "testsss*", "testsss?", "?estsss", // VFAT
    "ends with space ", "ends_with_period.", // DOS/Win32
    "CON", "Con", "coN", "cOn", "CoN", "con", "lpt1", "com9", // Reserved names (DOS/Win32)
    "con.txt", "lpt1.dat", // DOS/Win32 API (Reserved names are extension agnostic)
    "", "\0"]; // POSIX
|
||||
|
||||
#[test]
fn filename_valid_portable_accepts_valid_names() {
    // Every known-portable filename must pass.
    for name in VALID_FILENAMES.iter() {
        assert!(filename_valid_portable(OsStr::new(name)).is_ok(), "{:?}", name);
    }
}
|
||||
|
||||
#[test]
fn filename_valid_portable_refuses_path_separators() {
    // A bare *filename* may not contain any separator — native or foreign.
    let with_separators = PATHS_WITH_NATIVE_SEPARATORS
        .iter()
        .chain(PATHS_WITH_FOREIGN_SEPARATORS.iter());
    for path in with_separators {
        assert!(filename_valid_portable(OsStr::new(path)).is_err(), "{:?}", path);
    }
}
|
||||
|
||||
#[test]
fn filename_valid_portable_refuses_invalid_characters() {
    // Every name known to be invalid on some platform must be rejected.
    INVALID_PORTABLE_FILENAMES.iter().for_each(|fname| {
        assert!(filename_valid_portable(OsStr::new(fname)).is_err(), "{:?}", fname);
    });
}
|
||||
|
||||
#[test]
fn filename_valid_portable_refuses_empty_strings() {
    // An empty filename is never valid.
    let empty = OsStr::new("");
    assert!(filename_valid_portable(empty).is_err());
}
|
||||
|
||||
#[test]
fn filename_valid_portable_enforces_length_limits() {
    // 256 characters: one past the limit, must be rejected.
    let too_long = "X".repeat(256);
    assert!(filename_valid_portable(OsStr::new(&too_long)).is_err());

    // 255 characters (maximum for NTFS, ext2/3/4, and a lot of others): must pass.
    let max_len = &too_long[..255];
    assert!(filename_valid_portable(OsStr::new(max_len)).is_ok());
}
|
||||
|
||||
#[cfg(unix)]
#[test]
fn filename_valid_portable_refuses_non_utf8_bytes() {
    // Ensure that we refuse invalid UTF-8 since it's unclear if/how things like POSIX's
    // "bag of bytes" paths and Windows's un-paired UTF-16 surrogates map to each other.
    // A single 0xFF byte is never valid UTF-8.
    assert!(filename_valid_portable(OsStr::from_bytes(b"\xff")).is_err());
}
|
||||
#[cfg(windows)]
#[test]
fn filename_valid_portable_accepts_unpaired_surrogates() {
    // BUG FIX: this test lives in the filename_valid_portable section and is
    // named after it, but previously called path_valid_portable — duplicating
    // the dedicated path_valid_portable surrogate test further down and leaving
    // filename_valid_portable untested for un-paired UTF-16 surrogates.
    // NOTE(review): confirm filename_valid_portable is intended to accept
    // un-paired surrogates on Windows, matching the path-level validator.
    assert!(filename_valid_portable(&OsString::from_wide(&[0xd800])).is_ok());
}
|
||||
|
||||
// ---- path_valid_portable ----
|
||||
|
||||
#[test]
fn path_valid_portable_accepts_stdout() {
    // "-" is the conventional spelling for stdout and must pass path validation.
    let dash = OsStr::new("-");
    assert!(path_valid_portable(dash).is_ok());
}
|
||||
|
||||
#[test]
fn path_valid_portable_accepts_valid_names() {
    // Everything filename_valid_portable accepts must also pass as a path.
    for name in VALID_FILENAMES.iter() {
        assert!(path_valid_portable(OsStr::new(name)).is_ok(), "{:?}", name);
    }

    // A path with no filename component (.file_stem() returns None) is still valid.
    assert!(path_valid_portable(OsStr::new("foo/..")).is_ok());
}
|
||||
|
||||
#[test]
fn path_valid_portable_accepts_native_path_separators() {
    // Separators the platform's std::path::Path recognizes must be allowed.
    for path in PATHS_WITH_NATIVE_SEPARATORS.iter() {
        assert!(path_valid_portable(OsStr::new(path)).is_ok(), "{:?}", path);
    }

    // Verify that repeated separators are getting collapsed before
    // filename_valid_portable sees them.
    // TODO: Make this conditional on platform and also test repeated backslashes on Windows
    let repeated = OsStr::new("/path//with/repeated//separators");
    assert!(path_valid_portable(repeated).is_ok());
}
|
||||
|
||||
#[test]
fn path_valid_portable_refuses_foreign_path_separators() {
    // Separators std::path::Path does NOT recognize must show up as invalid
    // characters inside a component and be rejected.
    PATHS_WITH_FOREIGN_SEPARATORS.iter().for_each(|path| {
        assert!(path_valid_portable(OsStr::new(path)).is_err(), "{:?}", path);
    });
}
|
||||
|
||||
#[test]
fn path_valid_portable_refuses_invalid_characters() {
    // Names invalid as filenames stay invalid when treated as whole paths.
    for fname in INVALID_PORTABLE_FILENAMES.iter() {
        let verdict = path_valid_portable(OsStr::new(fname));
        assert!(verdict.is_err(), "{:?}", fname);
    }
}
|
||||
|
||||
#[test]
#[allow(clippy::decimal_literal_representation)] // Path lengths more intuitive as decimal
fn path_valid_portable_enforces_length_limits() {
    // Build 256-byte segments ("X" * 255 + "/") until just past 32760 bytes;
    // 128 segments gives 32768 bytes, the first multiple of 256 above 32761.
    let segment = format!("{}/", "X".repeat(255));
    let mut test_string = segment.repeat(128);

    // >32760 characters: over the limit, must be rejected.
    assert!(path_valid_portable(OsStr::new(&test_string)).is_err());

    // 32760 characters (maximum for FAT32/VFAT/exFAT): must pass.
    test_string.truncate(32760);
    assert!(path_valid_portable(OsStr::new(&test_string)).is_ok());

    // 256 characters with no path separators: component too long, rejected.
    test_string.truncate(255);
    test_string.push('X');
    assert!(path_valid_portable(OsStr::new(&test_string)).is_err());

    // 255 characters with no path separators: at the component limit, passes.
    test_string.truncate(255);
    assert!(path_valid_portable(OsStr::new(&test_string)).is_ok());
}
|
||||
|
||||
#[cfg(unix)]
#[test]
fn path_valid_portable_refuses_non_utf8_bytes() {
    // BUG FIX: renamed from ..._accepts_non_utf8_bytes — both the comment below
    // and the is_err() assertion expect *rejection*, so the old name was
    // misleading and contradicted its filename_valid_portable counterpart.
    // Ensure that we refuse invalid UTF-8 since it's unclear if/how things like POSIX's
    // "bag of bytes" paths and Windows's un-paired UTF-16 surrogates map to each other.
    assert!(path_valid_portable(OsStr::from_bytes(b"/\xff/foo")).is_err());
}
|
||||
#[cfg(windows)]
#[test]
fn path_valid_portable_accepts_unpaired_surrogates() {
    // Windows paths are potentially ill-formed UTF-16, so an un-paired surrogate
    // after a valid "C:\" prefix should still pass path-level validation.
    #[rustfmt::skip]
    assert!(path_valid_portable(&OsString::from_wide(
        &['C' as u16, ':' as u16, '\\' as u16, 0xd800])).is_ok());
}
|
||||
}
|
||||
87
src/web.rs
Normal file
87
src/web.rs
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
// #![deny(warnings)]
|
||||
#[allow(clippy::missing_docs_in_private_items)]
|
||||
|
||||
use crate::clouds;
|
||||
use tokio::fs::File;
|
||||
|
||||
use tokio_util::codec::{BytesCodec, FramedRead};
|
||||
|
||||
use hyper::service::{make_service_fn, service_fn};
|
||||
use hyper::{Body, Method, Request, Response, Result, Server, StatusCode};
|
||||
|
||||
#[allow(clippy::missing_docs_in_private_items)]
// Path of the HTML page served for "/" and "/index.html".
static INDEX: &str = "html/send_file_index.html";
#[allow(clippy::missing_docs_in_private_items)]
// Body bytes returned with every 404 response.
static NOTFOUND: &[u8] = b"Not Found";
|
||||
|
||||
// https://docs.rs/hyper/0.11.2/hyper/header/struct.Authorization.html
|
||||
// https://github.com/aramperes/hyper-server-ssl-auth-example/blob/master/src/main.rs
|
||||
// https://dev.to/deciduously/skip-the-framework-build-a-simple-rust-api-with-hyper-4jf5
|
||||
/*
|
||||
|
||||
#[tokio::main]
|
||||
pub async fn web_main() -> std::io::Result<()> { // Result<()> {
|
||||
pretty_env_logger::init();
|
||||
|
||||
let addr = "127.0.0.1:1337".parse().unwrap();
|
||||
|
||||
let make_service =
|
||||
make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(response_examples)) });
|
||||
|
||||
let server = Server::bind(&addr).serve(make_service);
|
||||
|
||||
println!("Listening on http://{}", addr);
|
||||
|
||||
if let Err(e) = server.await {
|
||||
eprintln!("server error: {}", e);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
*/
|
||||
#[allow(clippy::missing_docs_in_private_items)]
|
||||
pub async fn response_examples(req: Request<Body>) -> Result<Response<Body>> {
|
||||
match (req.method(), req.uri().path()) {
|
||||
(&Method::GET, "/") | (&Method::GET, "/index.html") => simple_file_send(INDEX).await,
|
||||
(&Method::GET, "/y") | (&Method::GET, "/yaml.html") => data_file_send().await,
|
||||
(&Method::GET, "/no_file.html") => {
|
||||
// Test what happens when file cannot be be found
|
||||
simple_file_send("this_file_should_not_exist.html").await
|
||||
}
|
||||
_ => Ok(not_found()),
|
||||
}
|
||||
}
|
||||
|
||||
/// HTTP status code 404
|
||||
#[allow(clippy::missing_docs_in_private_items,clippy::unwrap_used)]
|
||||
fn not_found() -> Response<Body> {
|
||||
Response::builder()
|
||||
.status(StatusCode::NOT_FOUND)
|
||||
.body(NOTFOUND.into())
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[allow(clippy::missing_docs_in_private_items)]
|
||||
pub async fn simple_file_send(filename: &str) -> Result<Response<Body>> {
|
||||
// Serve a file by asynchronously reading it by chunks using tokio-util crate.
|
||||
|
||||
if let Ok(file) = File::open(filename).await {
|
||||
let stream = FramedRead::new(file, BytesCodec::new());
|
||||
let body = Body::wrap_stream(stream);
|
||||
return Ok(Response::new(body));
|
||||
}
|
||||
|
||||
Ok(not_found())
|
||||
}
|
||||
|
||||
#[allow(clippy::missing_docs_in_private_items)]
|
||||
pub async fn data_file_send() -> Result<Response<Body>> {
|
||||
// Serve a file by asynchronously reading it by chunks using tokio-util crate.
|
||||
|
||||
// if let Ok(file) = File::open(filename).await {
|
||||
// let stream = FramedRead::new(file, BytesCodec::new());
|
||||
// let body = Body::wrap_stream(stream);
|
||||
match clouds::on_cloud("createserver", "test","install","","", 1).await {
|
||||
Ok(data) => Ok(Response::new(Body::from(data))),
|
||||
Err(_) =>Ok(Response::new(Body::from("Error")))
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue