Compare commits

16 Commits:

- c93fc0ae59
- 5b81587184
- b34f0fbd5f
- 6dbc6b6485
- ab70fabbe3
- 882ea24dea
- 7bf17a73ad
- 5e36269671
- 56695fc2dd
- 80810d30a6
- 836ab03ccf
- 9f77102f3a
- 97a2bfa891
- 8a9fa96599
- d4f5bbce45
- 943154725a
Cargo.toml

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "zterton"
-version = "0.1.2"
+version = "0.1.3"
 authors = ["JesusPerez <jpl@jesusperez.pro>"]
 edition = "2018"
 description= "WebServices and Clouds Monitoring for LibreCloud"
@@ -56,7 +56,7 @@ redis = { version = "0.21.2", features = [ "tokio-comp", "cluster"] }
 redis-graph = { version = "0.4.1", features = ['tokio-comp'] }
 sqlx = {version = "0.5.7", default-features = false, features = ["macros","runtime-tokio-rustls","sqlite", "mysql", "postgres", "decimal", "chrono"]}

-zterton = { version = "0.1.1", path = "../lib/zterton" }
+webenv = { version = "0.1.2", path = "../lib/webenv" }

 app_tools = { version = "0.1.0", path = "../lib/utils/app_tools" }
 app_env = { version = "0.1.0", path = "../lib/defs/app_env" }
@@ -78,4 +78,4 @@ clds = { version = "0.1.0", path = "../lib/clds" }
 key_of_life = { path = "../lib/key_of_life" }

 [dev-dependencies]
-pretty_assertions = "0.7.2"
+pretty_assertions = "1.0.0"
```
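The version bump to 0.1.3 rides along with a dependency swap: the binary now pulls `webenv` from `../lib/webenv` instead of the `zterton` library crate, which matches the `zterton::init_app` / `zterton::start_web` call sites becoming `webenv::init_app` / `webenv::start_web` in `src/main.rs` further down, and `pretty_assertions` moves to 1.0.0 in the dev profile.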
src/defs.rs (14 changed lines)

```diff
@@ -1,4 +1,5 @@
-use std::collections::{BTreeMap};
+use serde::{Serialize, Deserialize};
+use std::collections::{HashMap, BTreeMap};
 use app_env::{appenv::AppEnv, AppStore};
 use app_auth::{AuthStore};
 use kloud::{defs::KloudStore, datacontext::DataContext};
@@ -11,6 +12,8 @@ use connectors::defs::{AppDataConn};
 use clds::clouds::defs::{
   CloudEnv,
   Cloud,
+  SrvcsHostInfOut,
+  InfoStatus,
 };
 use kloud::kloud::Kloud;

@@ -73,4 +76,13 @@ pub async fn load_key() -> String {
   }
   key
 }
+pub type MapCheckInfo = BTreeMap<String,Vec<SrvcsHostInfOut>>;
+
+#[derive(Clone, Debug, Serialize, Deserialize, Default)]
+pub struct KldCheck {
+  pub name: String,
+  pub liveness: HashMap<String, MapCheckInfo>,
+  pub apps: HashMap<String, MapCheckInfo>,
+  pub infos: Vec<InfoStatus>,
+}
```
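To see what the new `KldCheck` payload looks like on the wire, here is a minimal serde round-trip sketch; `SrvcsHostInfOut` and `InfoStatus` are reduced stand-ins for the real `clds` types, not their actual definitions:

```rust
use std::collections::{BTreeMap, HashMap};
use serde::{Serialize, Deserialize};

// Stand-ins: the real types live in clds::clouds::defs and carry more fields.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct SrvcsHostInfOut { pub hostname: String }
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct InfoStatus { pub name: String }

pub type MapCheckInfo = BTreeMap<String, Vec<SrvcsHostInfOut>>;

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct KldCheck {
    pub name: String,
    pub liveness: HashMap<String, MapCheckInfo>,
    pub apps: HashMap<String, MapCheckInfo>,
    pub infos: Vec<InfoStatus>,
}

fn main() {
    let mut group = MapCheckInfo::new();
    group.insert("web".into(), vec![SrvcsHostInfOut { hostname: "node-1".into() }]);
    let mut apps = HashMap::new();
    apps.insert("main".into(), group);
    let check = KldCheck { name: "kloud".into(), apps, ..Default::default() };
    // -> {"name":"kloud","liveness":{},"apps":{"main":{"web":[{"hostname":"node-1"}]}},"infos":[]}
    println!("{}", serde_json::to_string(&check).unwrap());
}
```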
```diff
@@ -67,6 +67,18 @@ impl CollFilters {
       prfx: String::from(prfx),
     }
   }
+  pub fn filters_home(
+    &self,
+    db: DataDBs,
+    cloud: Cloud,
+    cors: warp::cors::Builder,
+    path: &str,
+  //) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
+  ) -> BoxedFilter<(impl warp::Reply,)> {
+    let url_path: &'static str = Box::leak(format!("{}",&path).into_boxed_str());
+    self.home(db.clone(),cloud.clone(),url_path,cors.clone())
+      .boxed()
+  }
   pub fn filters_defs(
     &self,
     db: DataDBs,
@@ -116,13 +128,39 @@ impl CollFilters {
     let provision_path: &'static str = Box::leak(format!("{}provision",&self.prfx).into_boxed_str());
     let status_path: &'static str = Box::leak(format!("{}status",&self.prfx).into_boxed_str());
     let liveness_path: &'static str = Box::leak(format!("{}liveness",&self.prfx).into_boxed_str());
+    let apps_path: &'static str = Box::leak(format!("{}apps",&self.prfx).into_boxed_str());
     self.config(db.clone(),cloud.clone(),config_path,cors.clone())
       .or(self.defs(db.clone(),defs_path,cors.clone()))
       .or(self.langs(db.clone(),lang_path,cors.clone()))
       .or(self.provision(db.clone(),cloud.clone(),provision_path,cors.clone()))
       .or(self.status(db.clone(),cloud.clone(),status_path,cors.clone()))
       .or(self.liveness(db.clone(),cloud.clone(),liveness_path,cors.clone()))
+      .or(self.apps(db.clone(),cloud.clone(),apps_path,cors.clone()))
       .boxed()
+  }
+  pub fn home(
+    &self,
+    db: DataDBs,
+    cloud: Cloud,
+    path: &'static str,
+    cors: warp::cors::Builder,
+  //) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
+  ) -> BoxedFilter<(impl warp::Reply,)> {
+    let prfx = self.prfx.to_owned();
+    warp::path(path)
+      .and(warp::get())
+      // .and(warp::query::<KloudQueryConfigFilters>())
+      .and(headers_cloned())
+      .and(method())
+      // .and_then(user_authentication)
+      // .and(warp::header::optional::<String>("authorization"))
+      // .and(warp::header::optional::<String>("accept-language"))
+      .and(self.with_db(db))
+      .and(warp::any().map(move || cloud.to_owned()))
+      .and(warp::any().map(move || prfx.to_owned()))
+      .and_then(handlers::h_home::home)
+      .with(cors)
+      .boxed()
   }
   /// GET /ta?offset=3&limit=5
   pub fn list(
@@ -399,6 +437,31 @@ impl CollFilters {
       .and_then(handlers::h_config::liveness)
       .with(cors)
      .boxed()
+  }
+  /// GET /status?offset=3&limit=5
+  pub fn apps(
+    &self,
+    db: DataDBs,
+    cloud: Cloud,
+    path: &'static str,
+    cors: warp::cors::Builder,
+  //) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
+  ) -> BoxedFilter<(impl warp::Reply,)> {
+    let prfx = self.prfx.to_owned();
+    warp::path(path)
+      .and(warp::get())
+      .and(warp::query::<KloudQueryConfigFilters>())
+      .and(headers_cloned())
+      .and(method())
+      // .and_then(user_authentication)
+      // .and(warp::header::optional::<String>("authorization"))
+      // .and(warp::header::optional::<String>("accept-language"))
+      .and(self.with_db(db))
+      .and(warp::any().map(move || cloud.to_owned()))
+      .and(warp::any().map(move || prfx.to_owned()))
+      .and_then(handlers::h_config::apps)
+      .with(cors)
+      .boxed()
   }
   fn with_db(&self, db: DataDBs) -> impl Filter<Extract = (DataDBs,), Error = std::convert::Infallible> + Clone {
     warp::any().map(move || db.clone())
```
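Both new methods return `BoxedFilter<(impl warp::Reply,)>`, warp's type-erased filter, which is what lets routes built from runtime configuration be chained with `.or()`. The `Box::leak` on the formatted path exists because `warp::path()` wants a `&'static str`. A self-contained sketch of that pattern (route names here are illustrative, not the project's):

```rust
use warp::{filters::BoxedFilter, Filter, Reply};

// warp::path() needs a &'static str, so a path assembled at runtime from a
// configured prefix is leaked once at startup, as the diff does.
fn route(prefix: &str, name: &str) -> BoxedFilter<(impl Reply,)> {
    let path: &'static str = Box::leak(format!("{}{}", prefix, name).into_boxed_str());
    warp::path(path)
        .and(warp::get())
        .map(move || format!("handler for {}", path))
        .boxed()
}

#[tokio::main]
async fn main() {
    // Boxed routes compose with .or(), mirroring filters_config/filters_home.
    let api = route("kloud", "status").or(route("kloud", "apps"));
    warp::serve(api).run(([127, 0, 0, 1], 3030)).await;
}
```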
src/handlers/mod.rs

```diff
@@ -1,4 +1,5 @@
 pub mod h_config;
 pub mod h_data;
 pub mod h_defs;
+pub mod h_home;
 pub mod datastores;
```
src/handlers/h_config.rs

```diff
@@ -15,39 +15,40 @@ use kloud::{
 };
 use clds::clouds::defs::{Cloud};
 use crate::defs::{DataDBs}; // ,CollsData};
-use clds::clouds::on_clouds::{on_cloud_req,on_cloud_name_req,get_cloud_check};
+use clds::clouds::on_req::{on_cloud_req,on_cloud_name_req_info,get_cache_data};

 pub async fn cloud_req(reqname: &str,cloud: &Cloud,reqenv: &ReqEnv,opts: &KloudQueryConfigFilters) -> String {
   let mut result: String;
+  let mut liveness_result = String::from("\"\"");
+  let mut apps_result = String::from("\"\"");
+  let mut status_result = String::from("\"\"");
   if opts.grp.is_empty() {
     result = on_cloud_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&opts.cld).await;
-  } else if opts.name.is_empty() {
-    let source = format!("{}/{}",&opts.cld,&opts.grp);
-    result = on_cloud_name_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&source).await;
   } else {
-    let source = format!("{}/{}/{}",&opts.cld,&opts.grp,&opts.name);
-    result = on_cloud_name_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&source).await;
-  }
-  if opts.tsksrvcs.contains("liveness") {
-    result = format!("{}:{}{}:{}{}",r#"{"req""#,&result,r#","liveness""#,get_cloud_check(&reqenv).await,r#"}"#);
-  }
-  match reqname {
-    "config" => {
-      result.to_owned()
-    },
-    "status" => {
-      result.to_owned()
-    },
-    "provision" => {
-      result.to_owned()
-    },
-    "liveness" => {
-      result.to_owned()
-    },
-    _ => {
-      result.to_owned()
-    }
-  }
+    let source: String;
+    if opts.name.is_empty() {
+      source = format!("{}/{}",&opts.cld,&opts.grp);
+    } else {
+      source = format!("{}/{}/{}",&opts.cld,&opts.grp,&opts.name);
+    }
+    let info = on_cloud_name_req_info(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,"",&source).await;
+    result = serde_json::to_string(&info).unwrap_or_else(|e|{
+      println!("Error serde info {} {}: ",&reqname,e);
+      String::from("")
+    });
+  }
+  // cloud_result = get_cloud_cache_req(&reqenv,&cloud, reqname: &str, reqname_job: &str).await
+  if opts.tsksrvcs.contains("liveness") {
+    liveness_result = get_cache_data("liveness",&reqenv).await;
+  }
+  if opts.tsksrvcs.contains("apps") {
+    apps_result = get_cache_data("apps",&reqenv).await;
+  }
+  if opts.tsksrvcs.contains("status") {
+    status_result = get_cache_data("status",&reqenv).await;
+  }
+  result = format!("{}:{}{}:{}{}:{}{}:{}{}",r#"{"clouds""#,&result,r#","apps""#,&apps_result,r#","status""#,&status_result,r#","liveness""#,&liveness_result,r#"}"#);
+  result.to_owned()
 }
 pub async fn config (
   opts: KloudQueryConfigFilters,
```
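The rewritten `cloud_req` no longer dispatches on `reqname` at the end; it always splices the cached sections into one envelope by raw string concatenation. A standalone sketch of that concatenation with dummy payloads shows why the section defaults are the two-character string `""`: the envelope must stay parseable JSON even when a section was not requested.

```rust
fn main() {
    // Stand-ins for the serialized fragments cloud_req collects.
    let result = r#"[{"name":"kloud"}]"#.to_string();
    let apps_result = String::from("\"\"");     // section not requested
    let status_result = r#"[{"host":"up"}]"#.to_string();
    let liveness_result = String::from("\"\""); // section not requested
    // Same raw-string concatenation as in the diff above.
    let envelope = format!("{}:{}{}:{}{}:{}{}:{}{}",
        r#"{"clouds""#, &result,
        r#","apps""#, &apps_result,
        r#","status""#, &status_result,
        r#","liveness""#, &liveness_result,
        r#"}"#);
    // {"clouds":[{"name":"kloud"}],"apps":"","status":[{"host":"up"}],"liveness":""}
    println!("{}", envelope);
}
```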
```diff
@@ -72,7 +73,7 @@ pub async fn config (
   // println!("User: {} | {}",&user.user_id,&user.token);

   // println!("LANG: {}",language);
-  // if prfx.as_str() == "ta" {
+  // if prfx.as_str() == "prfl" {
   //   let cur = db.colls.ta.entries.read();
   // }
   // let allow_origin = reqenv.websrvr().allow_origin;
@@ -93,9 +94,9 @@ pub async fn config (
           .body(result.to_string())
           .into_response())
       },
-      "ta" => {
-        /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-        let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+      "prfl" => {
+        /* let data = CollsData::get_prfl_entries(db.colls.clone()).await;
+        let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
         let mut skip = false;
         if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
         if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -110,7 +111,7 @@ pub async fn config (
           .into_response())
         /*
         Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
+          // warp::reply::json(&CollsData::get_prfl_entries(db.colls.clone()).await),
           warp::http::Response::new(result),
           // warp::http::Response::new(body),
           "Access-Control-Allow-Origin",
@@ -118,32 +119,6 @@ pub async fn config (
         // Ok(warp::reply::json(&result))
         */
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        /* let data = CollsData::get_tp_entries(db.colls.clone()).await;
-        let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
-        let mut skip = false;
-        if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
-        if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
-        if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
-        !skip
-        }).map(|(_,(_,value))| value).cloned().collect();
-        */
-        let data_out: Vec<String> = Vec::new();
-        let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
-        Ok(warp::http::Response::builder()
-          .body(result.to_string())
-          .into_response())
-        /*
-        Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
-          warp::http::Response::new(result),
-          // warp::http::Response::new(body),
-          "Access-Control-Allow-Origin",
-          &allow_origin))
-        // Ok(warp::reply::json(&result))
-        */
-      },
       _ => {
         //let result="";
         let result = format!("list for '{}' undefined",&prfx);
```
```diff
@@ -199,7 +174,7 @@ pub async fn provision (
   // println!("User: {} | {}",&user.user_id,&user.token);

   // println!("LANG: {}",language);
-  // if prfx.as_str() == "ta" {
+  // if prfx.as_str() == "prfl" {
   //   let cur = db.colls.ta.entries.read();
   // }
   // let allow_origin = reqenv.websrvr().allow_origin;
@@ -222,9 +197,9 @@ pub async fn provision (
           .body(result.to_string())
           .into_response())
       },
-      "ta" => {
-        /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-        let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+      "prfl" => {
+        /* let data = CollsData::get_prfl_entries(db.colls.clone()).await;
+        let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
         let mut skip = false;
         if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
         if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -239,7 +214,7 @@ pub async fn provision (
           .into_response())
         /*
         Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
+          // warp::reply::json(&CollsData::get_prfl_entries(db.colls.clone()).await),
           warp::http::Response::new(result),
           // warp::http::Response::new(body),
           "Access-Control-Allow-Origin",
@@ -247,32 +222,6 @@ pub async fn provision (
         // Ok(warp::reply::json(&result))
         */
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        /* let data = CollsData::get_tp_entries(db.colls.clone()).await;
-        let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
-        let mut skip = false;
-        if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
-        if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
-        if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
-        !skip
-        }).map(|(_,(_,value))| value).cloned().collect();
-        */
-        let data_out: Vec<String> = Vec::new();
-        let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
-        Ok(warp::http::Response::builder()
-          .body(result.to_string())
-          .into_response())
-        /*
-        Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
-          warp::http::Response::new(result),
-          // warp::http::Response::new(body),
-          "Access-Control-Allow-Origin",
-          &allow_origin))
-        // Ok(warp::reply::json(&result))
-        */
-      },
       _ => {
         //let result="";
         let result = format!("list for '{}' undefined",&prfx);
```
```diff
@@ -319,25 +268,26 @@ pub async fn status (
   prfx: String,
   // path: warp::path::FullPath, headers: warp::http::HeaderMap
 ) -> Result<impl Reply, Rejection> {
   // dbg!("{:#?}",&db);
   // dbg!("{:#?}",&header);
   // dbg!("{:#?}",&opts);
   let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/status", "status", &prfx);
   // if let Some(lang_str) = header.get("accept-language") {
   //   println!("{:?}",&lang_str);
   // }
   // println!("User: {} | {}",&user.user_id,&user.token);

+  println!("Status prfx: {}",&prfx);
   // println!("LANG: {}",language);
-  // if prfx.as_str() == "ta" {
+  // if prfx.as_str() == "prfl" {
   //   let cur = db.colls.ta.entries.read();
   // }
   // let allow_origin = reqenv.websrvr().allow_origin;
   // let result = cloud_req("status",&cloud,&reqenv,&opts).await;
   // println!("Result: {}",&result);
   // return Ok(warp::http::Response::builder()
   //   .body(result.to_string())
   //   .into_response());
   match reqenv.user_authentication().await {
     Ok(_auth) => {
       // dbg!("auth: {}",&auth);
@@ -349,9 +299,9 @@ pub async fn status (
           .body(result.to_string())
           .into_response())
       },
-      "ta" => {
-        /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-        let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+      "prfl" => {
+        /* let data = CollsData::get_prfl_entries(db.colls.clone()).await;
+        let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
         let mut skip = false;
         if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
         if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -366,7 +316,7 @@ pub async fn status (
           .into_response())
         /*
         Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
+          // warp::reply::json(&CollsData::get_prfl_entries(db.colls.clone()).await),
           warp::http::Response::new(result),
           // warp::http::Response::new(body),
           "Access-Control-Allow-Origin",
@@ -374,32 +324,6 @@ pub async fn status (
         // Ok(warp::reply::json(&result))
         */
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        /* let data = CollsData::get_tp_entries(db.colls.clone()).await;
-        let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
-        let mut skip = false;
-        if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
-        if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
-        if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
-        !skip
-        }).map(|(_,(_,value))| value).cloned().collect();
-        */
-        let data_out: Vec<String> = Vec::new();
-        let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
-        Ok(warp::http::Response::builder()
-          .body(result.to_string())
-          .into_response())
-        /*
-        Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
-          warp::http::Response::new(result),
-          // warp::http::Response::new(body),
-          "Access-Control-Allow-Origin",
-          &allow_origin))
-        // Ok(warp::reply::json(&result))
-        */
-      },
       _ => {
         //let result="";
         let result = format!("list for '{}' undefined",&prfx);
```
```diff
@@ -455,7 +379,7 @@ pub async fn liveness (
   // println!("User: {} | {}",&user.user_id,&user.token);

   // println!("LANG: {}",language);
-  // if prfx.as_str() == "ta" {
+  // if prfx.as_str() == "prfl" {
   //   let cur = db.colls.ta.entries.read();
   // }
   // let allow_origin = reqenv.websrvr().allow_origin;
@@ -478,9 +402,9 @@ pub async fn liveness (
           .body(result.to_string())
           .into_response())
       },
-      "ta" => {
-        /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-        let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+      "prfl" => {
+        /* let data = CollsData::get_prfl_entries(db.colls.clone()).await;
+        let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
         let mut skip = false;
         if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
         if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -495,7 +419,7 @@ pub async fn liveness (
           .into_response())
         /*
         Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
+          // warp::reply::json(&CollsData::get_prfl_entries(db.colls.clone()).await),
           warp::http::Response::new(result),
           // warp::http::Response::new(body),
           "Access-Control-Allow-Origin",
@@ -503,10 +427,88 @@ pub async fn liveness (
         // Ok(warp::reply::json(&result))
         */
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        /* let data = CollsData::get_tp_entries(db.colls.clone()).await;
-        let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+      _ => {
+        //let result="";
+        let result = format!("list for '{}' undefined",&prfx);
+        // (AuthError::UserNotFoundError.to_string())
+        println!("{}",&result);
+        Ok(warp::http::Response::builder()
+          .body(result.to_string())
+          .into_response())
+        /*
+        Ok(warp::reply::with_header(
+          // warp::reply::json(&""),
+          warp::http::Response::new(result),
+          // warp::http::Response::new(body),
+          "Access-Control-Allow-Origin",
+          &allow_origin))
+        */
+      },
+    }
+  },
+  Err(e) => {
+    let result = format!("Error: no credentials found");
+    println!("{}",e);
+    Ok(warp::http::Response::builder()
+      .body(result.to_string())
+      .into_response())
+    /*
+    Ok(warp::reply::with_header(
+      // warp::reply::json(&""),
+      warp::http::Response::new(result),
+      "Access-Control-Allow-Origin",
+      &allow_origin))
+    */
+  },
+  }
+// }
+}
+pub async fn apps(
+  opts: KloudQueryConfigFilters,
+  header: HeaderMap<HeaderValue>,
+  method: Method,
+  // user: UserCtx,
+  db: DataDBs,
+  cloud: Cloud,
+  prfx: String,
+  // path: warp::path::FullPath, headers: warp::http::HeaderMap
+) -> Result<impl Reply, Rejection> {
+  // dbg!("{:#?}",&db);
+  // dbg!("{:#?}",&header);
+  // dbg!("{:#?}",&opts);
+  let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/apps", "apps", &prfx);
+  // if let Some(lang_str) = header.get("accept-language") {
+  //   println!("{:?}",&lang_str);
+  // }
+  // println!("User: {} | {}",&user.user_id,&user.token);
+
+  // println!("LANG: {}",language);
+  // if prfx.as_str() == "prfl" {
+  //   let cur = db.colls.ta.entries.read();
+  // }
+  // let allow_origin = reqenv.websrvr().allow_origin;
+  // let test = true;
+  // if test == true {
+  // let result = cloud_req("liveness",&cloud,&reqenv,&opts).await;
+  // println!("Result: {}",&result);
+  // return Ok(warp::http::Response::builder()
+  //   .body(result.to_string())
+  //   .into_response());
+  // } else {
+  match reqenv.user_authentication().await {
+    Ok(_auth) => {
+      // dbg!("auth: {}",&auth);
+      match prfx.as_str() {
+        "kloud" => {
+          let result = cloud_req("apps",&cloud,&reqenv,&opts).await;
+          println!("Result: {}",&result);
+          Ok(warp::http::Response::builder()
+            .body(result.to_string())
+            .into_response())
+        },
+        "prfl" => {
+          /* let data = CollsData::get_prfl_entries(db.colls.clone()).await;
+          let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
         let mut skip = false;
         if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
         if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -521,11 +523,11 @@ pub async fn liveness (
           .into_response())
         /*
         Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
+          // warp::reply::json(&CollsData::get_prfl_entries(db.colls.clone()).await),
           warp::http::Response::new(result),
           // warp::http::Response::new(body),
           "Access-Control-Allow-Origin",
           &allow_origin))
         // Ok(warp::reply::json(&result))
         */
       },
```
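The new `apps` route deserializes its query string into `KloudQueryConfigFilters` via `warp::query`. A reduced sketch of that mechanism — the struct below only mirrors the fields `cloud_req` reads (`cld`, `grp`, `name`, `tsksrvcs`, `srvrs`) and is a hypothetical stand-in, not the real definition from `kloud::defs`:

```rust
use serde::Deserialize;
use warp::Filter;

// Hypothetical mirror of the query fields used by cloud_req.
#[derive(Debug, Default, Deserialize)]
struct QueryFilters {
    #[serde(default)] cld: String,
    #[serde(default)] grp: String,
    #[serde(default)] name: String,
    #[serde(default)] tsksrvcs: String,
    #[serde(default)] srvrs: String,
}

#[tokio::main]
async fn main() {
    // e.g. GET /apps?cld=ma&grp=web&tsksrvcs=apps,liveness
    let apps = warp::path("apps")
        .and(warp::get())
        .and(warp::query::<QueryFilters>())
        .map(|q: QueryFilters| format!("{:?}", q));
    warp::serve(apps).run(([127, 0, 0, 1], 3030)).await;
}
```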
src/handlers/h_data.rs

```diff
@@ -7,8 +7,8 @@ use warp::{
 };
 use reqenv::ReqEnv;
 /*
-use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
-use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
+use crate::app_profile::defs::{PrflStore,PrflData,PrflQueryFilters,Profile};
+use defs::kloud::utils::{lng_t,get_lang_items_str,load_lang};
 //use zterton::models::{AppStore, AppData};

 use crate::defs::AppDB;
@@ -23,9 +23,6 @@ use crate::auth::defs::{
   custom_reject
 };
 */
-// use ::topographic_anatomy::{TopographicAnatomy};
-// use ::tracking_point::{TrackingPoint};
-// use crate::tracking_point::defs::{TrackingPoint};
 //use app_auth::{UserCtx};
 use kloud::{
   defs::{
@@ -62,7 +59,7 @@ pub async fn table(
   } else {
     app_ctx = "";
   }
-  let lang_items = serde_json::to_string(&LangItems::new("langs/ta","es","yaml"))
+  let lang_items = serde_json::to_string(&LangItems::new("langs/prfl","es","yaml"))
     .unwrap_or_else(|_| String::from(""));
   let mut data_hash: HashMap<String, String> = HashMap::new();
   data_hash.insert("lang".to_string(), lang.to_owned());
@@ -145,7 +142,7 @@ pub async fn list(
   // println!("User: {} | {}",&user.user_id,&user.token);

   // println!("LANG: {}",language);
-  // if prfx.as_str() == "ta" {
+  // if prfx.as_str() == "prfl" {
   //   let cur = db.colls.ta.entries.read();
   // }
   // let allow_origin = reqenv.websrvr().allow_origin;
@@ -153,9 +150,9 @@ pub async fn list(
     Ok(_auth) => {
       // dbg!("auth: {}",&auth);
       match prfx.as_str() {
-        "ta" => {
+        "prfl" => {
           /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-          let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+          let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
           let mut skip = false;
           if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
           if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -178,32 +175,6 @@ pub async fn list(
         // Ok(warp::reply::json(&result))
         */
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        /* let data = CollsData::get_tp_entries(db.colls.clone()).await;
-        let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
-        let mut skip = false;
-        if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
-        if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
-        if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
-        !skip
-        }).map(|(_,(_,value))| value).cloned().collect();
-        */
-        let data_out: Vec<String> = Vec::new();
-        let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
-        Ok(warp::http::Response::builder()
-          .body(result.to_string())
-          .into_response())
-        /*
-        Ok(warp::reply::with_header(
-          // warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
-          warp::http::Response::new(result),
-          // warp::http::Response::new(body),
-          "Access-Control-Allow-Origin",
-          &allow_origin))
-        // Ok(warp::reply::json(&result))
-        */
-      },
       _ => {
         //let result="";
         let result = format!("list for '{}' undefined",&prfx);
@@ -260,7 +231,7 @@ pub async fn list(
 }
 // warp::generic::Either<(std::string::String,), (std::string::String,)>
 // pub async fn create(
-//   create: TopographicAnatomy,
+//   create: Profile,
 //   db: DataDBs,
 //   prfx: String,
 // ) -> Result<impl Reply, Rejection> {
@@ -298,8 +269,8 @@ pub async fn insert(
       // dbg!("auth: {}",&auth);
       /*
       match prfx.as_str() {
-        "ta" => {
-          match serde_json::from_str::<TopographicAnatomy>(&data.str_data) {
+        "prfl" => {
+          match serde_json::from_str::<Profile>(&data.str_data) {
             Ok(item) => {
               if db.colls.ta.entries.read().contains_key(&data.id) {
                 if data.id != item.id {
@@ -333,32 +304,6 @@ pub async fn insert(
           }
         }
       },
-      "tp" => {
-        // let result = CollsData::get_tp_entries(db.colls.clone()).await;
-        match serde_json::from_str::<TrackingPoint>(&data.str_data) {
-          Ok(item) => {
-            if db.colls.tp.entries.read().contains_key(&data.id) {
-              db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
-              println!("UPDATED tracking_point id: {}",&item.id);
-              result = format!("UPDATED: {}",&item.id);
-            } else {
-              db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
-              println!("CREATED tracking_point id: {}",&item.id);
-              result = format!("CREATED: {}",&item.id);
-            }
-          },
-          Err(e) => {
-            println!("Error {} parse insert : {}",&prfx,e);
-            result = format!("ERROR: {} parse insert: '{}'",&prfx,e);
-          }
-        }
-      },
-      _ => {
-        result = format!("ERROR: list for '{}' undefined",&prfx);
-        // (AuthError::UserNotFoundError.to_string())
-        println!("{}",&result);
-      }
-      };
       */
     },
     Err(e) => {
@@ -394,7 +339,7 @@ pub async fn delete(
      // dbg!("auth: {}",&auth);
       /*
       match prfx.as_str() {
-        "ta" => {
+        "prfl" => {
           if db.colls.ta.entries.read().contains_key(&data.id) {
             db.colls.ta.entries.write().remove(&data.id);
             if db.colls.ta.entries.read().contains_key(&data.id) {
@@ -408,26 +353,6 @@ pub async fn delete(
             result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
           }
         },
-        "tp" => {
-          if db.colls.tp.entries.read().contains_key(&data.id) {
-            db.colls.tp.entries.write().remove(&data.id);
-            if db.colls.tp.entries.read().contains_key(&data.id) {
-              println!("NOT DELETED tracking_point id: {}",&data.id);
-              result = format!("ERROR: tracking_point NOT deleted: '{}'",&data.id);
-            } else {
-              println!("DELETED tracking_point id: {}",&data.id);
-              result = format!("DELETED: tracking_point: '{}'",&data.id);
-            }
-          } else {
-            result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
-          }
-        },
-        _ => {
-          result = format!("ERROR: list for '{}' undefined",&prfx);
-          // (AuthError::UserNotFoundError.to_string())
-          println!("{}",&result);
-        }
-      };
       */
     },
     Err(e) => {
```
src/handlers/h_defs.rs

```diff
@@ -8,9 +8,9 @@ use warp::{
 use reqenv::ReqEnv;
 use app_env::profile::Profile;
 /*
-use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
-use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
-//use zterton::models::{AppStore, AppData};
+use crate::app_profile::defs::{TaStore,TaData,TaQueryFilters,Profile};
+use defs::kloud::utils::{lng_t,get_lang_items_str,load_lang};
+//use defs::app_env::{AppStore, AppData};

 use crate::defs::AppDB;
 use crate::auth::defs::{
@@ -24,9 +24,7 @@ use crate::auth::defs::{
   custom_reject
 };
 */
-// use ::topographic_anatomy::{TopographicAnatomy};
-// use ::tracking_point::{TrackingPoint};
-// use crate::tracking_point::defs::{TrackingPoint};
+// use ::app_profile::{Profile};
 //use app_auth::{UserCtx};
 use kloud::{
   defs::{
@@ -37,7 +35,32 @@ use kloud::{
 };

 use crate::defs::{DataDBs}; // ,CollsData};
+use app_auth::{User};
+
+pub async fn get_defs_from_req(reqenv: &ReqEnv, prfx: String, user: &User) -> String {
+  let prfx_path: String;
+  if prfx.is_empty() {
+    prfx_path = String::from("ui");
+  } else {
+    prfx_path = prfx;
+  }
+  let mut path: String;
+  if user.user_id.is_empty() {
+    path = format!("{}/profiles/{}/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx_path,"default");
+  } else {
+    path = format!("{}/profiles/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx_path);
+  }
+  if ! std::path::Path::new(&path).exists() {
+    path = format!("{}/profiles/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx_path);
+  }
+  let content = Profile::load_fs_content(path.into());
+  // let lang = opts.lang.unwrap_or_else(|| String::from("es"));
+  // let section = opts.section.unwrap_or_else(|| String::from(""));
+  // let lang_items = LangItems::new("langs/ta",&lang,"yaml");
+  // let result = lang_items.get_items_str(&section);
+  let res = Profile::to_yaml(content); // String::from("");
+  serde_json::to_string(&res).unwrap_or_else(|_| String::from(""))
+}
 // warp::generic::Either<(std::string::String,), (std::string::String,)>
 pub async fn langs(
   opts: KloudQueryLangFilters,
@@ -65,7 +88,7 @@ pub async fn langs(
   // dbg!("auth: {}",&auth);
   let lang = opts.lang.unwrap_or_else(|| String::from("es"));
   let section = opts.section.unwrap_or_else(|| String::from(""));
-  let lang_items = LangItems::new("langs/ta",&lang,"yaml");
+  let lang_items = LangItems::new("langs/prfl",&lang,"yaml");
   let result = lang_items.get_items_str(&section);
   Ok(warp::http::Response::builder()
     .body(result.to_string())
@@ -107,7 +130,7 @@ pub async fn defs(
   let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/defs", "defs", &prfx);
   // let allow_origin = reqenv.websrvr().allow_origin;
   match reqenv.user_authentication().await {
-    Ok(auth) => {
+    Ok(_auth) => {
       // dbg!("auth: {}",&auth);
       // println!("User: {} | {}",&user.user_id,&user.token);
       // if let Some(lang) = reqtasks.params().get("lang") {
@@ -117,18 +140,7 @@ pub async fn defs(
       // }
       // log::debug!("LANG: {} - {}",language, lang);
       // dbg!("LANG: {} - {}",language, lang);
-      let mut path = format!("{}/profiles/{}/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx,&auth.user_id);
-      if ! std::path::Path::new(&path).exists() {
-        path = format!("{}/profiles/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx);
-      }
-      let content = Profile::load_fs_content(path.into());
-      // let lang = opts.lang.unwrap_or_else(|| String::from("es"));
-      // let section = opts.section.unwrap_or_else(|| String::from(""));
-      // let lang_items = LangItems::new("langs/ta",&lang,"yaml");
-      // let result = lang_items.get_items_str(&section);
-      let res = Profile::to_yaml(content); // String::from("");
-      let result = serde_json::to_string(&res).unwrap_or_else(|_| String::from(""));
+      let result = get_defs_from_req(&reqenv, prfx.to_owned(), &reqenv.get_user().await).await;
       Ok(warp::http::Response::builder()
         .body(result.to_string())
         .into_response())
```
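`get_defs_from_req` centralizes the defs.yaml lookup the `defs` handler previously inlined, and changes its resolution: an empty prefix maps to the `ui` profile, an anonymous user reads the `default` sub-profile, and a missing file falls back to the shared per-prefix defs.yaml. A pure-path sketch of that order, with `resources` standing in for `reqenv.websrvr().resources_path`:

```rust
use std::path::Path;

// Resolution order mirrored from get_defs_from_req; only the surrounding
// ReqEnv/User types are stripped away.
fn defs_path(resources: &str, prfx: &str, user_id: &str) -> String {
    let prfx_path = if prfx.is_empty() { "ui" } else { prfx };
    let mut path = if user_id.is_empty() {
        // anonymous callers get the "default" sub-profile
        format!("{}/profiles/{}/default/defs.yaml", resources, prfx_path)
    } else {
        format!("{}/profiles/{}/defs.yaml", resources, prfx_path)
    };
    if !Path::new(&path).exists() {
        // fall back to the shared per-prefix defs.yaml
        path = format!("{}/profiles/{}/defs.yaml", resources, prfx_path);
    }
    path
}

fn main() {
    println!("{}", defs_path("/resources", "", ""));        // ui/default, then fallback
    println!("{}", defs_path("/resources", "kloud", "jp")); // shared kloud defs
}
```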
src/handlers/h_home.rs (new file, 231 lines)

```rust
use std::collections::{HashMap,BTreeMap};
use warp::{
  // http::{StatusCode},
  http::{method::Method, HeaderMap, HeaderValue},
  Reply, Rejection,
};
use reqenv::ReqEnv;
// use app_env::profile::Profile;
use app_auth::{UserCtx};

// use kloud::{
//   defs::{
//     KloudQueryConfigFilters,
//   },
// };
use clds::defs::{KloudCheckHome,CloudCheckItem};
use clds::status::{get_statusinfo_fileslist,load_statusinfo};
use clds::clouds::defs::{Cloud,SrvcsHostInfOut,AppOut,AppsrvcInfo,AppsrvcInfOut, TskSrvcOut, TsksrvcInfo, TsksrvcInfOut,InfoStatus};
use clds::clouds::on_req::{get_cache_data};
// use app_env::appenv::{Rol,Policy};

use crate::defs::{KldCheck, MapCheckInfo, DataDBs}; // ,CollsData};
use crate::handlers::h_defs::{get_defs_from_req};

pub fn tsks_info_on_role(role: String, tsksrvcs: Vec<TsksrvcInfo>) -> Vec<TsksrvcInfOut> {
  tsksrvcs.into_iter().map(|tsk| {
    let srvc = match role.as_str() {
      "admin" =>
        TskSrvcOut {
          name: tsk.srvc.name.to_owned(),
          path: tsk.srvc.path.to_owned(),
          req: tsk.srvc.req.to_owned(),
          target: tsk.srvc.target.to_owned(),
          liveness: tsk.srvc.liveness.to_owned(),
          critical: tsk.srvc.critical.to_owned(),
        },
      _ =>
        TskSrvcOut {
          name: tsk.srvc.name.to_owned(),
          path: String::from(""),
          req: String::from(""),
          target: tsk.srvc.target.to_owned(),
          liveness: String::from(""),
          critical: tsk.srvc.critical.to_owned(),
        },
    };
    TsksrvcInfOut {
      name: tsk.name.to_owned(),
      info: tsk.info.to_owned(),
      srvc
    }
  }).collect()
}
pub fn app_info_on_role(role: String, appsrvcs: Vec<AppsrvcInfo>) -> Vec<AppsrvcInfOut> {
  appsrvcs.into_iter().map(|app| {
    let srvc = match role.as_str() {
      "admin" =>
        AppOut {
          name: app.srvc.name.to_owned(),
          path: app.srvc.path.to_owned(),
          req: app.srvc.req.to_owned(),
          target: app.srvc.target.to_owned(),
          liveness: app.srvc.liveness.to_owned(),
          critical: app.srvc.critical.to_owned(),
        },
      _ =>
        AppOut {
          name: app.srvc.name.to_owned(),
          path: String::from(""),
          req: String::from(""),
          target: app.srvc.target.to_owned(),
          liveness: String::from(""),
          critical: app.srvc.critical.to_owned(),
        },
    };
    AppsrvcInfOut {
      name: app.name.to_owned(),
      info: app.info.to_owned(),
      srvc
    }
  }).collect()
}
pub fn collect_clouds_check_items(role: &str, items: &Vec<CloudCheckItem> ) -> BTreeMap<String,Vec<SrvcsHostInfOut>> {
  let mut m_items = BTreeMap::new();
  items.iter().for_each(|itm| {
    let liveness: Vec<SrvcsHostInfOut>;
    liveness = itm.liveness.to_owned().into_iter().map(|it| {
      SrvcsHostInfOut {
        hostname: format!("{}",&it.hostname),
        tsksrvcs: tsks_info_on_role(role.to_owned(), it.tsksrvcs),
        appsrvcs: app_info_on_role(role.to_owned(), it.appsrvcs),
      }
    }).collect();
    m_items.insert(format!("{}",&itm.name),liveness);
  });
  m_items
}
pub async fn get_info_status(reqenv: &ReqEnv,kld: &KloudCheckHome) -> Vec<InfoStatus> {
  let kld_statusinfo_files: Vec<String> = get_statusinfo_fileslist(&reqenv.config().state_path,&kld.name).await.unwrap_or_else(|e|{
    println!("Error on infos {}: {}",&kld.name,e);
    Vec::new()
  });
  load_statusinfo(&format!("{}/{}",&reqenv.config().state_path,&kld.name), kld_statusinfo_files).await
}
pub async fn get_tsks_apps_check(reqenv: &ReqEnv,_auth: UserCtx, role: &str, prfx: String) -> String {
  let result_data: String;
  let res_liveness: Vec<KloudCheckHome>;
  let result_status = get_cache_data("status",&reqenv).await;
  if result_status.is_empty() {
    let result_liveness = get_cache_data("liveness",&reqenv).await;
    result_data = get_cache_data("apps",&reqenv).await;
    res_liveness = serde_json::from_str(&result_liveness).unwrap_or_else(|e| {
      println!("Error serde liveness json: {}",e);
      Vec::new()
    });
  } else {
    result_data = result_status;
    res_liveness = Vec::new();
  }
  let res_data: Vec<KloudCheckHome> = serde_json::from_str(&result_data).unwrap_or_else(|e| {
    println!("Error serde status json: {}",e);
    Vec::new()
  });

  let mut kld_check: Vec<KldCheck> = Vec::new();
  for kld in res_data {
    // let mut list_groups = String::from("");
    let mut grp_check_hash: HashMap<String, MapCheckInfo> = HashMap::new();
    kld.groups.iter().for_each(|grp| {
      // if !list_groups.is_empty() {
      //   list_groups.push(',');
      // }
      // list_groups.push_str(&grp.name);
      grp_check_hash.insert(format!("{}",grp.name),collect_clouds_check_items(&role,&grp.items));
    });

    kld_check.push(KldCheck {
      name: kld.name.to_owned(),
      liveness: HashMap::new(),
      apps: grp_check_hash,
      infos: get_info_status(reqenv, &kld).await,
    });
  };
  for kld in res_liveness {
    let mut grp_check_hash: HashMap<String, MapCheckInfo> = HashMap::new();
    kld.groups.iter().for_each(|grp| {
      grp_check_hash.insert(format!("{}",grp.name),collect_clouds_check_items(&role,&grp.items));
    });
    // dbg!(&kld_statusinfo_files);
    if kld_check.len() == 0 {
      kld_check.push(KldCheck {
        name: kld.name.to_owned(),
        apps: HashMap::new(),
        liveness: grp_check_hash,
        infos: get_info_status(reqenv, &kld).await,
      });
    } else {
      let mut kldix = 0;
      kld_check.iter().enumerate().for_each(|(idx,k)| {
        if k.name == kld.name {
          kldix = idx;
        }
      });
      kld_check[kldix].liveness = grp_check_hash.to_owned();
    }
  };
  let check_json=serde_json::to_string(&kld_check).unwrap_or_else(|e|{
    println!("Error serde from value: {}",e);
    String::from("")
  });
  let defs_json = get_defs_from_req(&reqenv, prfx.to_owned(), &reqenv.get_user().await).await;
  format!("{}:{}{}:{}{}",r#"{"check""#,&check_json,r#","defs""#,&defs_json,r#"}"#).to_owned()
  // serde_json::to_string(self).unwrap_or_else(|_| String::from("")).replace("\n","")
}
pub async fn home(
  header: HeaderMap<HeaderValue>,
  method: Method,
  // user: UserCtx,
  db: DataDBs,
  _cloud: Cloud,
  prfx: String,
  // headers: warp::http::HeaderMap
  // path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
  let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/home", "home", &prfx);
  // let allow_origin = reqenv.websrvr().allow_origin;
  // let opts = KloudQueryConfigFilters::default();
  // let source = format!("{}/{}","ma","");
  match reqenv.user_authentication().await {
    Ok(auth) => {
      // dbg!("auth: {}",&auth);
      // println!("User: {} | {}",&user.user_id,&user.token);
      // if let Some(lang) = reqtasks.params().get("lang") {
      //   res.render_json_text(&get_lang_items_str("langs",req_lang,"yaml"))
      // } else {
      //   res.render_json_text(&get_lang_items_str("langs",&reqtasks.lang(),"yaml"))
      // }
      // log::debug!("LANG: {} - {}",language, lang);
      // dbg!("LANG: {} - {}",language, lang);
      let role = reqenv.req.user_role().await;
      // reqenv.is_admin()
      let result = get_tsks_apps_check(&reqenv,auth,&role,prfx.to_owned()).await;
      // let mut path = format!("{}/profiles/{}/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx,&auth.user_id);
      // if ! std::path::Path::new(&path).exists() {
      //   path = format!("{}/profiles/{}/defs.yaml",reqenv.websrvr().resources_path,&prfx);
      // }
      // let content = Profile::load_fs_content(path.into());

      // let lang = opts.lang.unwrap_or_else(|| String::from("es"));
      // let section = opts.section.unwrap_or_else(|| String::from(""));
      // let lang_items = LangItems::new("langs/ta",&lang,"yaml");
      // let result = lang_items.get_items_str(&section);

      // let res = Profile::to_yaml(content); // String::from("");
      // let result = serde_json::to_string(&res).unwrap_or_else(|_| String::from(""));

      Ok(warp::http::Response::builder()
        .body(result.to_string())
        .into_response())
    },
    Err(_e) => {
      // let result = format!("Error: no credentials found");
      // println!("{}",e);
      let result = get_tsks_apps_check(&reqenv,UserCtx::default(),"",prfx.to_owned()).await;
      Ok(warp::http::Response::builder()
        .body(result.to_string())
        .into_response())
    }
  }
}
```
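The role gate in `tsks_info_on_role`/`app_info_on_role` is field-level redaction: every caller gets the same list of services, but non-admin roles receive them with `path`, `req`, and `liveness` blanked. A reduced sketch of the pattern (the struct is a stand-in, not the `clds` type):

```rust
#[derive(Clone, Debug)]
struct SrvcOut { name: String, path: String, target: String }

// Non-admin roles see the service name and target but not its internal path.
fn on_role(role: &str, srvcs: Vec<SrvcOut>) -> Vec<SrvcOut> {
    srvcs.into_iter().map(|s| match role {
        "admin" => s,
        _ => SrvcOut { path: String::new(), ..s },
    }).collect()
}

fn main() {
    let srvcs = vec![SrvcOut {
        name: "web".into(), path: "/opt/web".into(), target: "node-1".into(),
    }];
    println!("{:?}", on_role("admin", srvcs.clone()));
    println!("{:?}", on_role("viewer", srvcs)); // path redacted
}
```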
196
src/main.rs
196
src/main.rs
@ -24,7 +24,13 @@ use crate::defs::{DataDBs,CollsData,load_cloud_env};
|
|||||||
use clds::clouds::defs::{
|
use clds::clouds::defs::{
|
||||||
Cloud,
|
Cloud,
|
||||||
};
|
};
|
||||||
use clds::clouds::on_clouds::{make_cloud_cache,run_clouds_check};
|
use clds::clouds::utils::{
|
||||||
|
env_cloud,
|
||||||
|
};
|
||||||
|
use clds::clouds::on_req::{
|
||||||
|
run_cache_data,
|
||||||
|
make_cloud_cache,
|
||||||
|
};
|
||||||
use reqenv::ReqEnv;
|
use reqenv::ReqEnv;
|
||||||
|
|
||||||
// static WEBSERVER: AtomicUsize = AtomicUsize::new(0);
|
// static WEBSERVER: AtomicUsize = AtomicUsize::new(0);
|
||||||
@@ -60,7 +66,7 @@ async fn up_web_server(webpos: usize) -> Result<()> {
     format!("authors: {}",PKG_AUTHORS),
     format!("{}",PKG_DESCRIPTION),
   );
-  zterton::init_app(&mut app_env,verbose).await.unwrap_or_else(|e|
+  webenv::init_app(&mut app_env,verbose).await.unwrap_or_else(|e|
     panic!("Error loadding app environment {}",e)
   );
   let config = app_env.get_curr_websrvr_config();
@@ -78,7 +84,7 @@ async fn up_web_server(webpos: usize) -> Result<()> {
   if verbose > 0 {
     println!("Loading webserver: {} ({})",&config.name,&app_env.curr_web);
   }
-  let (app, socket) = zterton::start_web(&mut app_env).await;
+  let (app, socket) = webenv::start_web(&mut app_env).await;
   if verbose > 0 {
     println!("Load app store ...");
   }
@@ -136,26 +142,29 @@ async fn up_web_server(webpos: usize) -> Result<()> {
     );
   }
   let mut cloud = Cloud::default();
+  env_cloud("*", &mut cloud.env).await?;
   load_cloud_env(&mut cloud).await;
-  // app_api.to_owned()
   // If not graphQL comment/remove next line
   let gql_api=graphql::graphql(gql_path, data_dbs.clone(),cors.clone()).await; //.with(cors.clone());

   // // Add ALL ENTITIES to work with here
-  let kloud_api = filters::CollFilters::new("kloud")
+  let kloud_api = filters::CollFilters::new(&config.prefix)
     .filters_config(data_dbs.clone(),cloud.clone(),cors.clone());

   let file_api = app_file_filters::files(app_store.clone(),auth_store.clone()).with(cors.clone());
   // Path for static files, better to be LAST
   let fs_api = warp::fs::dir(html_path).with(warp::compression::gzip());
-  // Recover and handle errors
+  let home_api = filters::CollFilters::new(&config.prefix)
+    .filters_home(data_dbs.clone(),cloud.clone(),cors.clone(),"info");
+
   let app_api = auth_api
     .or(gqli_api).or(gql_api)
+    .or(home_api)
     .or(kloud_api)
     .or(file_api)
     .or(fs_api)
     .recover(move | error: warp::Rejection| handle_rejection(error, app_store.clone()))
     .boxed();
   // Wrap routes with log to get info
   let routes = app_api.with(warp::log(log_name));
   //let routes = app_api.with(cors).with(warp::log(log_name));
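Note: `home_api` is composed before `kloud_api` because warp's `or` tries each filter in order and the first one that matches wins, which is also why the static-file route stays last. A minimal sketch of that ordering rule (hypothetical routes, not this project's):

    use warp::Filter;

    #[tokio::main]
    async fn main() {
        // `or` tries the left filter first; put specific routes before fallbacks.
        let home = warp::path("home").map(|| "home page");
        let static_files = warp::fs::dir("./static"); // catch-all, keep last
        let routes = home.or(static_files);
        warp::serve(routes).run(([127, 0, 0, 1], 3030)).await;
    }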
@@ -219,74 +228,49 @@ async fn set_reqenv(app_env: &AppEnv,verbose: isize) -> ReqEnv {
     "/config", "config", "kloud"
   )
 }
-pub async fn run_cache_clouds() -> Result<()> {
+pub async fn run_on_clouds(target_task: &str) -> Result<()> {
   let (arg_cfg_path,arg_env_path) = get_args();
   let now = chrono::Utc::now().timestamp();
   let verbose = envmnt::get_isize("DEBUG", 0);
   if verbose > 0 {
-    println!("Cache service on Clouds: run {} __________ {} {} ",&now,&arg_cfg_path,&arg_env_path);
+    println!("{} service on Clouds: run {} __________ {} {} ",&target_task,&now,&arg_cfg_path,&arg_env_path);
   }
   let (cloud, app_env) = match get_app_env(arg_cfg_path,verbose).await {
     Ok((c,e)) => (c,e),
     Err(e) => {
-      println!("Cache service on Clouds: done {} __________ ",&now);
+      println!("{} service on Clouds: errors {} __________ ",&target_task,&now);
       return Err(e);
     },
   };
   let reqenv = set_reqenv(&app_env,verbose).await;
-  let res = make_cloud_cache(&reqenv,&cloud).await;
-  if verbose > 0 {
-    println!("Cache service on Clouds: done {} __________ ",&now);
-  }
-  res
-}
-pub async fn run_check_clouds() -> Result<()> {
-  let (arg_cfg_path,arg_env_path) = get_args();
-  let now = chrono::Utc::now().timestamp();
-  let verbose = envmnt::get_isize("DEBUG", 0);
-  if verbose > 0 {
-    println!("Check Cloud services: run {} __________ {} {} ",&now,&arg_cfg_path,&arg_env_path);
-  }
-  let (cloud, app_env) = match get_app_env(arg_cfg_path,verbose).await {
-    Ok((c,e)) => (c,e),
-    Err(e) => {
-      println!("Check Cloud service: done {} __________ ",&now);
-      return Err(e);
-    },
+  let res = match target_task {
+    "monitor" => {
+      let monitor_rules = MonitorRules::load(
+        &app_env.config.monitor_rules_path,
+        &app_env.config.monitor_rules_file,
+        &app_env.config.monitor_rules_format
+      );
+      if monitor_rules.rules.len() == 0 {
+        eprintln!("No monitor rules found");
+        return Ok(());
+      }
+      monitor_rules.run(cloud,app_env).await
+    },
+    "apps" =>
+      run_cache_data(&target_task,&reqenv,&cloud,&target_task,"*").await,
+    "liveness" =>
+      run_cache_data(&target_task,&reqenv,&cloud,"monitor,liveness","*").await,
+    "status" =>
+      run_cache_data(&target_task,&reqenv,&cloud,"monitor,status","*").await,
+    "cache" =>
+      make_cloud_cache(&reqenv,&cloud).await,
+    _ => {
+      eprintln!("Error scheduling task {}",&target_task);
+      Ok(())
+    }
   };
-  let reqenv = set_reqenv(&app_env,verbose).await;
-  let res = run_clouds_check(&reqenv,&cloud).await;
   if verbose > 0 {
-    println!("Check Cloud service: done {} __________ ",&now);
+    println!("{} service on Clouds: done {} __________ ",&target_task,&now);
   }
-  res
-}
-pub async fn run_clouds_monitor() -> Result<()> {
-  let (arg_cfg_path,arg_env_path) = get_args();
-  let now = chrono::Utc::now().timestamp();
-  let verbose = envmnt::get_isize("DEBUG", 0);
-  if verbose > 0 {
-    println!("Monitor Cloud: run {} __________ {} {} ",&now,arg_cfg_path,&arg_env_path);
-  }
-  let (cloud, app_env) = match get_app_env(arg_cfg_path,verbose).await {
-    Ok((c,e)) => (c,e),
-    Err(e) => {
-      println!("Monitor Cloud done {} __________ ",&now);
-      return Err(e);
-    },
-  };
-  let monitor_rules = MonitorRules::load(
-    &app_env.config.monitor_rules_path,
-    &app_env.config.monitor_rules_file,
-    &app_env.config.monitor_rules_format
-  );
-  if monitor_rules.rules.len() == 0 {
-    eprintln!("No monitor rules found");
-    return Ok(());
-  }
-  let res = monitor_rules.run(cloud,app_env).await;
-  if verbose > 0 {
-    println!("Monitor Cloud done {} __________ ",&now);
-  }
   res
 }
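Note: the three near-duplicate task runners (`run_cache_clouds`, `run_check_clouds`, `run_clouds_monitor`) collapse into a single `run_on_clouds(target_task)` dispatcher that shares the env/config setup and selects the work by task name. An illustrative call site (assuming the same `Result<()>` alias this file already uses):

    // Illustrative only: run the consolidated tasks by name.
    // Unknown names hit the `_` arm above, which logs an error and returns Ok(()).
    async fn run_startup_checks() -> Result<()> {
        for task in ["cache", "liveness", "status", "monitor"] {
            run_on_clouds(task).await?;
        }
        Ok(())
    }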
@@ -295,12 +279,14 @@ pub async fn main() -> BxDynResult<()> { //std::io::Result<()> {
   let args: Vec<String> = std::env::args().collect();
   if args.len() > 1 {
     match args[1].as_str() {
-      "-h" | "--help" =>
-        println!("{} USAGE: -c config-toml -e env.file",PKG_NAME),
+      "-h" | "--help" => {
+        println!("{} USAGE: -c config-toml -e env.file",PKG_NAME);
+        return Ok(());
+      },
       "-v" | "--version" => {
         println!("{} version: {}",PKG_NAME,PKG_VERSION);
         return Ok(());
       },
       _ => println!("{}",PKG_NAME),
     }
   }
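Note: `-h`/`--help` now returns after printing usage instead of falling through and starting the servers, matching the existing `-v`/`--version` behavior. Illustrative invocations (assuming the binary name; `<PKG_VERSION>` stands in for whatever version is compiled in):

    $ zterton -h
    zterton USAGE: -c config-toml -e env.file
    $ zterton -v
    zterton version: <PKG_VERSION>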
@@ -326,24 +312,18 @@ pub async fn main() -> BxDynResult<()> { //std::io::Result<()> {
   }
   let config = Config::new(config_content,debug);
   let app_data_conn = AppDataConn::new("Zterton".to_string(),config.datastores_settings.to_owned(),"").await;
-  if !app_data_conn.check_connections(config.datastores_settings.to_owned()).await {
-    println!("Error checking app data store connections");
-  }
+  if config.datastores_settings.len() > 0 {
+    if !app_data_conn.check_connections(config.datastores_settings.to_owned()).await {
+      println!("Error checking app data store connections");
+    }
+  }
   if config.run_schedtasks {
-    for it in &config.schedtasks {
-      if ! it.on_start {
-        continue;
-      }
-      match it.name.as_str() {
-        "monitor" => tokio::spawn(async {run_clouds_monitor().await}),
-        "check" => tokio::spawn(async {run_check_clouds().await}),
-        "cache" => tokio::spawn(async {run_cache_clouds().await}),
-        _ => {
-          eprintln!("Error task {} not defined",&it.name);
-          continue;
-        },
-      };
-    }
+    config.schedtasks.clone().iter().for_each(|it| {
+      if it.on_start {
+        let name:&'static str = Box::leak(format!("{}",&it.name).into_boxed_str());
+        tokio::spawn(async move {run_on_clouds(name).await});
+      }
+    });
   }
   if config.run_websrvrs {
     for (pos,it) in config.websrvrs.iter().enumerate() {
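Note: `tokio::spawn` requires a `'static` future, so the task name (a `String` borrowed from config) is intentionally leaked with `Box::leak` to obtain a `&'static str` that can move into the spawned closure. A minimal sketch of the pattern:

    // Leak a heap String once to get a &'static str usable in 'static tasks.
    // Acceptable for a small, fixed set of task names built at startup;
    // each call leaks its allocation for the life of the process.
    fn leak_name(name: &str) -> &'static str {
        Box::leak(name.to_string().into_boxed_str())
    }

    #[tokio::main]
    async fn main() {
        let task: &'static str = leak_name("monitor");
        tokio::spawn(async move {
            // `task` has a 'static lifetime, so the future satisfies spawn's bound.
            println!("running scheduled task: {}", task);
        })
        .await
        .unwrap();
    }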
@@ -355,38 +335,18 @@ pub async fn main() -> BxDynResult<()> { //std::io::Result<()> {
     }
   }
   if config.run_schedtasks {
-    for it in config.schedtasks {
+    for it in config.schedtasks.clone() {
       if it.schedule.is_empty() {
         eprintln!("Task {} no schedule defined",&it.name);
         continue;
       }
-      let res = match it.name.as_str() {
-        "monitor" =>
-          sched.add(Job::new(&it.schedule.to_owned(), move |uuid, _l| {
-            if debug > 0 {
-              println!("Schedule {} {}: {}",&it.name,&it.schedule,uuid);
-            }
-            tokio::spawn(async {run_clouds_monitor().await});
-          })?),
-        "check" =>
-          sched.add(Job::new(&it.schedule.to_owned(), move |uuid, _l| {
-            if debug > 0 {
-              println!("Schedule {} {}: {}",&it.name,&it.schedule,uuid);
-            }
-            tokio::spawn(async {run_check_clouds().await});
-          })?),
-        "cache" =>
-          sched.add(Job::new(&it.schedule.to_owned(), move |uuid, _l| {
-            if debug > 0 {
-              println!("Schedule {} {}: {}",&it.name,&it.schedule,uuid);
-            }
-            tokio::spawn(async {run_cache_clouds().await});
-          })?),
-        _ => {
-          eprintln!("Error task {} not defined",&it.name);
-          continue;
-        },
-      };
+      let res = sched.add(Job::new(&it.schedule.to_owned(), move |uuid, _l| {
+        if debug > 0 {
+          println!("Schedule {} {}: {}",&it.name,&it.schedule,uuid);
+        }
+        let name:&'static str = Box::leak(format!("{}",&it.name).into_boxed_str());
+        tokio::spawn(async move {run_on_clouds(name).await});
+      })?);
       match res {
         Ok(_) => { continue; },
         Err(e) => {
@@ -395,7 +355,7 @@ pub async fn main() -> BxDynResult<()> { //std::io::Result<()> {
         },
       }
     };
-    let _= sched.start().await;
   }
+  let _= sched.start().await;
   Ok(())
 }
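Note: `sched.start()` moves out of the per-task loop, so all jobs are registered first and the scheduler starts exactly once. A sketch of the resulting flow, reusing only the calls shown in this diff (exact signatures depend on the tokio_cron_scheduler version in use):

    // Register every scheduled job, then start the scheduler a single time.
    for it in config.schedtasks.clone() {
        if it.schedule.is_empty() { continue; }
        let res = sched.add(Job::new(&it.schedule.to_owned(), move |_uuid, _l| {
            // spawn the async work for this tick, e.g. run_on_clouds(name)
        })?);
        // handle res as above ...
    }
    let _ = sched.start().await; // starting once, after registration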