Compare commits


No commits in common. "8a9fa96599a10b08004051dae4df5e396668b9ed" and "943154725aa8de99a5d11ffc5fefae3bcc2dc0d7" have entirely different histories.

3 changed files with 92 additions and 42 deletions

File 1 of 3

@@ -116,14 +116,12 @@ impl CollFilters {
 let provision_path: &'static str = Box::leak(format!("{}provision",&self.prfx).into_boxed_str());
 let status_path: &'static str = Box::leak(format!("{}status",&self.prfx).into_boxed_str());
 let liveness_path: &'static str = Box::leak(format!("{}liveness",&self.prfx).into_boxed_str());
-let apps_path: &'static str = Box::leak(format!("{}apps",&self.prfx).into_boxed_str());
 self.config(db.clone(),cloud.clone(),config_path,cors.clone())
 .or(self.defs(db.clone(),defs_path,cors.clone()))
 .or(self.langs(db.clone(),lang_path,cors.clone()))
 .or(self.provision(db.clone(),cloud.clone(),provision_path,cors.clone()))
 .or(self.status(db.clone(),cloud.clone(),status_path,cors.clone()))
 .or(self.liveness(db.clone(),cloud.clone(),liveness_path,cors.clone()))
-.or(self.apps(db.clone(),cloud.clone(),apps_path,cors.clone()))
 .boxed()
 }
 /// GET /ta?offset=3&limit=5
@@ -401,31 +399,6 @@ impl CollFilters {
 .and_then(handlers::h_config::liveness)
 .with(cors)
 .boxed()
-}
-/// GET /status?offset=3&limit=5
-pub fn apps(
-&self,
-db: DataDBs,
-cloud: Cloud,
-path: &'static str,
-cors: warp::cors::Builder,
-//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
-) -> BoxedFilter<(impl warp::Reply,)> {
-let prfx = self.prfx.to_owned();
-warp::path(path)
-.and(warp::get())
-.and(warp::query::<KloudQueryConfigFilters>())
-.and(headers_cloned())
-.and(method())
-// .and_then(user_authentication)
-// .and(warp::header::optional::<String>("authorization"))
-// .and(warp::header::optional::<String>("accept-language"))
-.and(self.with_db(db))
-.and(warp::any().map(move || cloud.to_owned()))
-.and(warp::any().map(move || prfx.to_owned()))
-.and_then(handlers::h_config::apps)
-.with(cors)
-.boxed()
 }
 fn with_db(&self, db: DataDBs) -> impl Filter<Extract = (DataDBs,), Error = std::convert::Infallible> + Clone {
 warp::any().map(move || db.clone())
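
Note: the removed `apps` route follows the same warp recipe as the routes that remain: a static path, `warp::get()`, query extraction, state injected through `warp::any().map(...)`, then `.or(...)` chaining and `.boxed()`. Below is a minimal, self-contained sketch of that pattern, not code from this repository; warp 0.3 with tokio is assumed, and the route names, the injected `prfx` string, and the port are illustrative.

    use warp::{filters::BoxedFilter, Filter, Reply};

    // GET /<path> returning a small text body; the prefix is injected into the
    // handler the same way the filters above inject `cloud` and `prfx`.
    fn status(path: &'static str, prfx: String) -> BoxedFilter<(impl Reply,)> {
        warp::path(path)
            .and(warp::get())
            .and(warp::any().map(move || prfx.clone()))
            .map(|prfx: String| format!("status ok for '{}'", prfx))
            .boxed()
    }

    #[tokio::main]
    async fn main() {
        // Routes built in separate functions are combined with `.or(...)`, as in CollFilters.
        let routes = status("status", "demo".to_owned())
            .or(warp::path("liveness").and(warp::get()).map(|| "ok"));
        warp::serve(routes).run(([127, 0, 0, 1], 8030)).await;
    }

Returning `BoxedFilter<(impl Reply,)>` keeps each route's signature compact, so filters assembled in different methods can be chained with `.or(...)` without spelling out their full concrete types.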

File 2 of 3

@@ -7,8 +7,8 @@ use warp::{
 };
 use reqenv::ReqEnv;
 /*
-use crate::app_profile::defs::{PrflStore,PrflData,PrflQueryFilters,Profile};
-use defs::kloud::utils::{lng_t,get_lang_items_str,load_lang};
+use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
+use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
 //use zterton::models::{AppStore, AppData};
 use crate::defs::AppDB;
@@ -23,6 +23,9 @@ use crate::auth::defs::{
 custom_reject
 };
 */
+// use ::topographic_anatomy::{TopographicAnatomy};
+// use ::tracking_point::{TrackingPoint};
+// use crate::tracking_point::defs::{TrackingPoint};
 //use app_auth::{UserCtx};
 use kloud::{
 defs::{
@@ -59,7 +62,7 @@ pub async fn table(
 } else {
 app_ctx = "";
 }
-let lang_items = serde_json::to_string(&LangItems::new("langs/prfl","es","yaml"))
+let lang_items = serde_json::to_string(&LangItems::new("langs/ta","es","yaml"))
 .unwrap_or_else(|_| String::from(""));
 let mut data_hash: HashMap<String, String> = HashMap::new();
 data_hash.insert("lang".to_string(), lang.to_owned());
@@ -142,7 +145,7 @@ pub async fn list(
 // println!("User: {} | {}",&user.user_id,&user.token);
 // println!("LANG: {}",language);
-// if prfx.as_str() == "prfl" {
+// if prfx.as_str() == "ta" {
 // let cur = db.colls.ta.entries.read();
 // }
 // let allow_origin = reqenv.websrvr().allow_origin;
@@ -150,9 +153,9 @@
 Ok(_auth) => {
 // dbg!("auth: {}",&auth);
 match prfx.as_str() {
-"prfl" => {
+"ta" => {
 /* let data = CollsData::get_ta_entries(db.colls.clone()).await;
-let data_out: Vec<Profile> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
 let mut skip = false;
 if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
 if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
@@ -175,6 +178,32 @@ pub async fn list(
 // Ok(warp::reply::json(&result))
 */
 },
+"tp" => {
+// let result = CollsData::get_tp_entries(db.colls.clone()).await;
+/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
+let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
+let mut skip = false;
+if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
+if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
+if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
+!skip
+}).map(|(_,(_,value))| value).cloned().collect();
+*/
+let data_out: Vec<String> = Vec::new();
+let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
+Ok(warp::http::Response::builder()
+.body(result.to_string())
+.into_response())
+/*
+Ok(warp::reply::with_header(
+// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
+warp::http::Response::new(result),
+// warp::http::Response::new(body),
+"Access-Control-Allow-Origin",
+&allow_origin))
+// Ok(warp::reply::json(&result))
+*/
+},
 _ => {
 //let result="";
 let result = format!("list for '{}' undefined",&prfx);
@@ -231,7 +260,7 @@ pub async fn list(
 }
 // warp::generic::Either<(std::string::String,), (std::string::String,)>
 // pub async fn create(
-// create: Profile,
+// create: TopographicAnatomy,
 // db: DataDBs,
 // prfx: String,
 // ) -> Result<impl Reply, Rejection> {
@@ -269,8 +298,8 @@ pub async fn insert(
 // dbg!("auth: {}",&auth);
 /*
 match prfx.as_str() {
-"prfl" => {
-match serde_json::from_str::<Profile>(&data.str_data) {
+"ta" => {
+match serde_json::from_str::<TopographicAnatomy>(&data.str_data) {
 Ok(item) => {
 if db.colls.ta.entries.read().contains_key(&data.id) {
 if data.id != item.id {
@@ -304,6 +333,32 @@ pub async fn insert(
 }
 }
 },
+"tp" => {
+// let result = CollsData::get_tp_entries(db.colls.clone()).await;
+match serde_json::from_str::<TrackingPoint>(&data.str_data) {
+Ok(item) => {
+if db.colls.tp.entries.read().contains_key(&data.id) {
+db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
+println!("UPDATED tracking_point id: {}",&item.id);
+result = format!("UPDATED: {}",&item.id);
+} else {
+db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
+println!("CREATED tracking_point id: {}",&item.id);
+result = format!("CREATED: {}",&item.id);
+}
+},
+Err(e) => {
+println!("Error {} parse insert : {}",&prfx,e);
+result = format!("ERROR: {} parse insert: '{}'",&prfx,e);
+}
+}
+},
+_ => {
+result = format!("ERROR: list for '{}' undefined",&prfx);
+// (AuthError::UserNotFoundError.to_string())
+println!("{}",&result);
+}
+};
 */
 },
 Err(e) => {
@@ -339,7 +394,7 @@ pub async fn delete(
 // dbg!("auth: {}",&auth);
 /*
 match prfx.as_str() {
-"prfl" => {
+"ta" => {
 if db.colls.ta.entries.read().contains_key(&data.id) {
 db.colls.ta.entries.write().remove(&data.id);
 if db.colls.ta.entries.read().contains_key(&data.id) {
@@ -353,6 +408,26 @@ pub async fn delete(
 result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
 }
 },
+"tp" => {
+if db.colls.tp.entries.read().contains_key(&data.id) {
+db.colls.tp.entries.write().remove(&data.id);
+if db.colls.tp.entries.read().contains_key(&data.id) {
+println!("NOT DELETED tracking_point id: {}",&data.id);
+result = format!("ERROR: tracking_point NOT deleted: '{}'",&data.id);
+} else {
+println!("DELETED tracking_point id: {}",&data.id);
+result = format!("DELETED: tracking_point: '{}'",&data.id);
+}
+} else {
+result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
+}
+},
+_ => {
+result = format!("ERROR: list for '{}' undefined",&prfx);
+// (AuthError::UserNotFoundError.to_string())
+println!("{}",&result);
+}
+};
 */
 },
 Err(e) => {
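
Note: the added "tp" arms mirror the existing "ta" arms: look the record up under a read lock, insert or remove it under a write lock, and report the outcome as a string. The sketch below isolates that upsert/delete logic in a standalone form; it assumes `parking_lot::RwLock` (the guards above are used without `.unwrap()`), `serde`/`serde_json` with the `derive` feature, and an illustrative two-field `TrackingPoint`, none of which is confirmed by this diff.

    use std::collections::HashMap;
    use parking_lot::RwLock;
    use serde::{Deserialize, Serialize};

    // Illustrative stand-in for the real TrackingPoint definition.
    #[derive(Clone, Serialize, Deserialize)]
    struct TrackingPoint {
        id: String,
        name: String,
    }

    // Mirrors the "tp" insert arm: parse the payload, then create or update by id.
    fn upsert_tp(entries: &RwLock<HashMap<String, TrackingPoint>>, raw: &str) -> String {
        match serde_json::from_str::<TrackingPoint>(raw) {
            Ok(item) => {
                let existed = entries.read().contains_key(&item.id);
                entries.write().insert(item.id.clone(), item.clone());
                if existed { format!("UPDATED: {}", &item.id) } else { format!("CREATED: {}", &item.id) }
            },
            Err(e) => format!("ERROR: tp parse insert: '{}'", e),
        }
    }

    // Mirrors the "tp" delete arm: remove by id and confirm it is gone.
    fn delete_tp(entries: &RwLock<HashMap<String, TrackingPoint>>, id: &str) -> String {
        if entries.read().contains_key(id) {
            entries.write().remove(id);
            if entries.read().contains_key(id) {
                format!("ERROR: tracking_point NOT deleted: '{}'", id)
            } else {
                format!("DELETED: tracking_point: '{}'", id)
            }
        } else {
            format!("ERROR: tracking_point NOT found: '{}'", id)
        }
    }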

File 3 of 3

@@ -8,9 +8,9 @@ use warp::{
 use reqenv::ReqEnv;
 use app_env::profile::Profile;
 /*
-use crate::app_profile::defs::{TaStore,TaData,TaQueryFilters,Profile};
-use defs::kloud::utils::{lng_t,get_lang_items_str,load_lang};
-//use defs::app_env::{AppStore, AppData};
+use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
+use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
+//use zterton::models::{AppStore, AppData};
 use crate::defs::AppDB;
 use crate::auth::defs::{
@@ -24,7 +24,9 @@ use crate::auth::defs::{
 custom_reject
 };
 */
-// use ::app_profile::{Profile};
+// use ::topographic_anatomy::{TopographicAnatomy};
+// use ::tracking_point::{TrackingPoint};
+// use crate::tracking_point::defs::{TrackingPoint};
 //use app_auth::{UserCtx};
 use kloud::{
 defs::{
@@ -63,7 +65,7 @@ pub async fn langs(
 // dbg!("auth: {}",&auth);
 let lang = opts.lang.unwrap_or_else(|| String::from("es"));
 let section = opts.section.unwrap_or_else(|| String::from(""));
-let lang_items = LangItems::new("langs/prfl",&lang,"yaml");
+let lang_items = LangItems::new("langs/ta",&lang,"yaml");
 let result = lang_items.get_items_str(&section);
 Ok(warp::http::Response::builder()
 .body(result.to_string())
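
Note: the `langs` handler above reduces to a common warp shape: optional query fields that fall back to defaults, then a plain-text body built with `warp::http::Response`. A minimal sketch under that assumption follows; `LangOpts`, the header value, and the commented wiring line are illustrative, and the real `LangItems` lookup is not reproduced.

    use serde::Deserialize;
    use warp::{Rejection, Reply};

    // Illustrative query parameters; the real handler also receives headers, DBs and a prefix.
    #[derive(Deserialize)]
    struct LangOpts {
        lang: Option<String>,
        section: Option<String>,
    }

    async fn langs(opts: LangOpts) -> Result<impl Reply, Rejection> {
        let lang = opts.lang.unwrap_or_else(|| String::from("es"));
        let section = opts.section.unwrap_or_default();
        // A real implementation would load the items for `lang` and `section` here.
        let body = format!("lang={} section={}", lang, section);
        let resp = warp::http::Response::builder()
            .header("content-type", "text/plain; charset=utf-8")
            .body(body)
            .unwrap_or_default();
        Ok(resp)
    }

    // Wiring, in the style of the filters in the first file:
    // warp::path("langs").and(warp::get()).and(warp::query::<LangOpts>()).and_then(langs)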