chore: add src & cargo files

Author: Jesús Pérez Lorenzo
Date:   2021-08-28 15:23:34 +01:00
Parent: 84ee02512a
Commit: 1588e7bec0
12 changed files with 6432 additions and 0 deletions

Cargo.lock (generated, new file, 3937 lines)
File diff suppressed because it is too large.

Cargo.toml (new file, 79 lines)

@@ -0,0 +1,79 @@
[package]
name = "zterton"
version = "0.1.0"
authors = ["JesusPerez <jpl@jesusperez.pro>"]
edition = "2018"
publish = false
[profile.dev]
opt-level = 0
[profile.release]
opt-level = 3
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.40"
async-graphql = "2.8.4"
async-graphql-warp = "2.8.4"
async-jobs = "0.2.0"
async-std = "1.9.0"
tokio-cron-scheduler = "0.2.1"
base64 = "0.13.0"
bytes = "1.0.1"
casbin = "2.0.7"
chrono = "0.4.19"
envmnt = "0.9.0"
futures-util = "0.3.14"
http = "0.2.4"
#job_scheduler = "1.2.1"
# lazy_static = "1.4.0"
log = "0.4.14"
once_cell = "1.7.2"
parking_lot = "0.11.1"
pretty_env_logger = "0.4"
serde = { version = "1.0.125", features = ["derive"] }
serde_derive = "1.0.125"
serde_json = "1.0.64"
serde_yaml = "0.8.17"
slab = "0.4.3"
tera = "1.8.0"
thiserror = "1.0.24"
warp = { version = "0.3.1", features = ["default","websocket","tls","compression"] }
# salvo = { version = "0.5.5", features = ["full"] }
# salvo_extra = "0.5.5"
# salvo_macros = "0.5.5"
toml = "0.5.8"
uuid = { version = "0.8.2", features = ["serde", "v5"] }
url = "2.2.1"
tokio = { version = "1.5.0", features = ["full"] }
#zterton = { path = "../../karyo/osmosys" }
zterton = { version = "0.1.1", path = "../lib/zterton" }
app_tools = { version = "0.1.0", path = "../lib/utils/app_tools" }
app_env = { version = "0.1.0", path = "../lib/defs/app_env" }
app_auth = { version = "0.1.0", path = "../lib/defs/app_auth" }
app_errors = { version = "0.1.0", path = "../lib/defs/app_errors" }
kloud = { version = "0.1.0", path = "../lib/defs/kloud" }
gql_playground = { version = "0.1.0", path = "../lib/graphql/gql_playground" }
app_auth_handlers = { version = "0.1.0", path = "../lib/handlers/app_auth_handlers" }
app_file_filters = { version = "0.1.0", path = "../lib/filters/app_file_filters" }
app_file_handlers = { version = "0.1.0", path = "../lib/handlers/app_file_handlers" }
reqtasks = { version = "0.1.0", path = "../lib/handlers/reqtasks" }
reqenv = { version = "0.1.0", path = "../lib/handlers/reqenv" }
app_auth_filters = { version = "0.1.0", path = "../lib/filters/app_auth_filters" }
reject_filters = { version = "0.1.0", path = "../lib/filters/reject_filters" }
#kloud_entries_macro_derive = { path = "../lib/macros/kloud_entries_macro_derive" }
librecloud_kloud = { version = "0.1.0", path = "../lib/defs/librecloud/kloud" }
clds = { version = "0.1.0", path = "../lib/clds" }
#topographic_anatomy = { version = "0.1.0", path = "../lib/defs/bm/topographic_anatomy" }
#tracking_point = { version = "0.1.0", path = "../lib/defs/bm/tracking_point" }
key_of_life = { path = "../lib/key_of_life" }
[dev-dependencies]
pretty_assertions = "0.7.2"
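A quick sketch of how this dev-dependency is typically exercised (test-only; the test itself is illustrative and not part of this commit):

#[cfg(test)]
mod tests {
    // pretty_assertions swaps the std assert_eq! output for a colored,
    // line-by-line diff; it is compiled only under `cargo test`.
    use pretty_assertions::assert_eq;

    #[test]
    fn package_version_matches_manifest() {
        assert_eq!(env!("CARGO_PKG_VERSION"), "0.1.0");
    }
}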

src/defs.rs (new file, 104 lines)

@@ -0,0 +1,104 @@
use std::collections::{BTreeMap};
use app_env::{appenv::AppEnv, AppStore};
use app_auth::{AuthStore};
use kloud::{defs::KloudStore, datacontext::DataContext};
use kloud::utils::load_from_module;
use clds::clouds::defs::{
CloudEnv,
Cloud,
};
use librecloud_kloud::Kloud;
//use topographic_anatomy::TopographicAnatomy;
//use tracking_point::TrackingPoint;
#[derive(Clone,Default)]
pub struct CollsData {
pub klouds: KloudStore<Kloud>,
// pub ta: KloudStore<TopographicAnatomy>,
// pub tp: KloudStore<TrackingPoint>,
}
impl CollsData {
pub fn new(env: AppEnv) -> Self {
// dbg!(&env.contexts);
let (klouds_frmt, klouds_content) = load_from_module(env.to_owned(),"klouds");
// let (ta_frmt, ta_content) = load_from_module(env.to_owned(),"ta");
// let (tp_frmt, tp_content) = load_from_module(env.to_owned(),"tp");
Self {
klouds: KloudStore::new(
Kloud::entries(&klouds_content,&klouds_frmt),
"klouds".to_owned(),
DataContext::default()
),
/*
ta: KloudStore::new(
TopographicAnatomy::entries(&ta_content,&ta_frmt),
"ta".to_owned(),
DataContext::default()
),
tp: KloudStore::new(
TrackingPoint::entries(&tp_content,&tp_frmt),
"tp".to_owned(), DataContext::default())
*/
}
}
pub async fn get_klouds_entries(coll_map: CollsData) -> BTreeMap<String,Kloud> {
let mut result = BTreeMap::new();
let cur = coll_map.klouds.entries.read();
for (key,value) in cur.iter() {
result.insert(key.to_owned(), value.to_owned());
}
result
}
/*
pub async fn get_ta_entries(coll_map: CollsData) -> BTreeMap<String,TopographicAnatomy> {
let mut result = BTreeMap::new();
let cur = coll_map.ta.entries.read();
for (key,value) in cur.iter() {
result.insert(key.to_owned(), value.to_owned());
}
result
}
pub async fn get_tp_entries(coll_map: CollsData) -> BTreeMap<String,TrackingPoint> {
let mut result = BTreeMap::new();
let cur = coll_map.tp.entries.read();
for (key,value) in cur.iter() {
result.insert(key.to_owned(), value.to_owned());
}
result
}
*/
}
#[derive(Clone)]
pub struct AppDBs {
pub colls: CollsData,
pub app: AppStore,
}
#[derive(Clone)]
pub struct DataDBs {
pub colls: CollsData,
pub app: AppStore,
pub auth: AuthStore,
}
pub async fn load_cloud_env(cloud: &mut Cloud) {
let force: u8 = "-f".as_bytes()[0];
cloud.env = CloudEnv::new(force,load_key().await);
cloud.providers = Cloud::load_providers().await;
}
pub const KEY_PATH: &str = ".k";
use key_of_life::get_key;
pub async fn load_key() -> String {
let key_path = envmnt::get_or("KEY_PATH", KEY_PATH);
let key = get_key(&key_path,None).await;
if key.is_empty() {
    // NOTE: Unix keeps only the low 8 bits of the exit status, so 0x0100 is reported as 0.
    std::process::exit(0x0100);
}
key
}
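A minimal usage sketch for the two helpers above (the Cloud::default() call and the override path are assumptions; main.rs builds its Cloud elsewhere). load_key honors a KEY_PATH environment override via envmnt and falls back to ".k":

// Sketch: prepare a Cloud before serving requests (names illustrative).
envmnt::set("KEY_PATH", "/etc/zterton/.k"); // optional override, assumed path
let mut cloud = Cloud::default();           // assumes Cloud implements Default
load_cloud_env(&mut cloud).await;           // reads the key, then the providers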

src/filters.rs (new file, 411 lines)

@@ -0,0 +1,411 @@
//use app_auth::{UserCtx};
use kloud::{
defs::{
KloudQueryFilters,
KloudQueryDefsFilters,
KloudQueryLangFilters,
// KloudReqData,
KloudQueryConfigFilters,
},
};
use crate::defs::DataDBs;
use crate::handlers;
use clds::clouds::defs::{Cloud};
use warp::{
filters::header::headers_cloned,
filters::method::method,
filters::BoxedFilter,
Filter,
};
/// Warp filters for a data collection, addressed by a configurable path prefix.
// #[must_use]
// pub fn hello (
// //) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
// ) -> impl Filter<Extract = impl warp::Reply, Error = std::convert::Infallible> + Clone {
// warp::any().map(move|| "Hola, World!")
// }
/*
let default_auth = warp::any().map(|| {
// something default
});
let auth = warp::header("authorization")
.map(|token: String| {
// something with token
})
.or(default_auth)
.unify();
let context_extractor = warp::any().and(
warp::header::<String>("authorization")
.map(|token: String| -> Context {
let token_data = match verify_jwt(token) {
Ok(t) => t,
Err(_) => return Context { user_id: 0 },
};
Context {
user_id: token_data.claims.user_id,
}
})
.or(warp::any().map(|| Context { user_id: 0 }))
.unify(),
);
*/
#[derive(Clone,Debug,Default)]
pub struct CollFilters {
pub prfx: String, // &'static str,
}
impl CollFilters {
pub fn new(prfx: &str) -> Self {
Self {
prfx: String::from(prfx),
}
}
pub fn filters_defs(
&self,
db: DataDBs,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let defs_path: &'static str = Box::leak(format!("{}defs",&self.prfx).into_boxed_str());
let lang_path: &'static str = Box::leak(format!("{}lang",&self.prfx).into_boxed_str());
self.list(db.clone(),defs_path,cors.clone())
.or(self.langs(db.clone(),lang_path,cors.clone()))
.boxed()
}
pub fn filters_data(
&self,
db: DataDBs,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let list_path: &'static str = Box::leak(format!("{}data",&self.prfx).into_boxed_str());
// let table_path: &'static str = Box::leak(format!("{}table",&self.prfx).into_boxed_str());
let defs_path: &'static str = Box::leak(format!("{}defs",&self.prfx).into_boxed_str());
let lang_path: &'static str = Box::leak(format!("{}lang",&self.prfx).into_boxed_str());
let insert_path: &'static str = Box::leak(format!("{}insert",&self.prfx).into_boxed_str());
let delete_path: &'static str = Box::leak(format!("{}delete",&self.prfx).into_boxed_str());
self.list(db.clone(),list_path,cors.clone())
// .or(self.table(db.clone(),table_path,cors.clone()))
.or(self.defs(db.clone(),defs_path,cors.clone()))
.or(self.langs(db.clone(),lang_path,cors.clone()))
// for others use "x" with path! macro
.or(self.insert(db.clone(),insert_path,cors.clone()))
// .or(self.update(db.clone()))
.or(self.delete(db.clone(),delete_path,cors.clone()))
.boxed()
}
pub fn filters_config(
&self,
db: DataDBs,
cloud: Cloud,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let defs_path: &'static str = Box::leak(format!("{}defs",&self.prfx).into_boxed_str());
let lang_path: &'static str = Box::leak(format!("{}lang",&self.prfx).into_boxed_str());
let config_path: &'static str = Box::leak(format!("{}config",&self.prfx).into_boxed_str());
let provision_path: &'static str = Box::leak(format!("{}provision",&self.prfx).into_boxed_str());
let status_path: &'static str = Box::leak(format!("{}status",&self.prfx).into_boxed_str());
let liveness_path: &'static str = Box::leak(format!("{}liveness",&self.prfx).into_boxed_str());
self.config(db.clone(),cloud.clone(),config_path,cors.clone())
.or(self.defs(db.clone(),defs_path,cors.clone()))
.or(self.langs(db.clone(),lang_path,cors.clone()))
.or(self.provision(db.clone(),cloud.clone(),provision_path,cors.clone()))
.or(self.status(db.clone(),cloud.clone(),status_path,cors.clone()))
.or(self.liveness(db.clone(),cloud.clone(),liveness_path,cors.clone()))
.boxed()
}
/// GET /ta?offset=3&limit=5
pub fn list(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryFilters>())
.and(headers_cloned())
.and(method())
// .and_then(user_authentication)
// .and(warp::header::optional::<String>("authorization"))
// .and(warp::header::optional::<String>("accept-language"))
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
// and(self.with_auth(db.app.clone(),db.auth.clone()))
//.and(warp::any().map(move || &self.clone().db))
// .map(|method: warp::http::Method, path: warp::path::FullPath, headers: warp::http::HeaderMap, opts: TaQueryFilters, db: AppDB | {
// dbg!(&headers);
// dbg!(&path);
// let mut req = warp::http::Request::builder()
// .method(method)
// .uri(path.as_str())
// ;
// // // .body(body)
// // .expect("request builder");
// // { *req.headers_mut() = headers; }
// req
// })
.and_then(handlers::h_data::list)
.with(cors)
.boxed()
}
/*
/// GET /ta?offset=3&limit=5
pub fn table(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryFilters>())
.and(headers_cloned())
.and(method())
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_data::table)
.with(cors)
.boxed()
}
*/
/// GET /ta?offset=3&limit=5
pub fn defs(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryDefsFilters>())
.and(headers_cloned())
.and(method())
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_defs::defs)
.with(cors)
.boxed()
}
/// GET /{prfx}lang with KloudQueryLangFilters query parameters.
pub fn langs(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
// warp::any().and(
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
// .and(warp::query::<TaQueryFilters>())
// .and(warp::filters::query::raw())
// .or(warp::any().map(|| String::default()))
// .unify()
.and(warp::query::<KloudQueryLangFilters>())
.and(headers_cloned())
.and(method())
// .and(with_auth(db.app.clone(),db.auth.clone()))
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_defs::langs)
.with(cors)
.boxed()
}
/// POST /{prfx}insert with a JSON body.
pub fn insert(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::post())
.and(warp::body::json())
.and(headers_cloned())
.and(method())
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_data::insert)
.with(cors)
.boxed()
}
// /// PUT /ta/:id with JSON body
// pub fn update(
// &self,
// db: DataDBs,
// //) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
// ) -> BoxedFilter<(impl warp::Reply,)> {
// let prfx = self.prfx.to_owned();
// warp::path(prfx.to_owned()) // , String)
// .and(warp::path::param())
// .and(warp::put())
// .and(self.json_body())
// .and(self.with_db(db))
// .and(warp::any().map(move || prfx.to_owned()))
// .and_then(handlers::h_data::update)
// .boxed()
// }
/// POST /{prfx}delete with a JSON body (a logical delete, not an HTTP DELETE).
pub fn delete(
&self,
db: DataDBs,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
// We'll make one of our endpoints admin-only to show how authentication filters are used
// let admin_only = warp::header::exact("authorization", "Bearer admin");
let prfx = self.prfx.to_owned();
warp::path(path)
// .and(warp::path::param())
// It is important to put the auth check _after_ the path filters.
// If the auth check came first, a request like `POST /{prfx}delete` with a
// bad path would be rejected because the authorization header doesn't match,
// rather than because the path is wrong.
// .and(admin_only)
// .and(warp::delete())
.and(warp::post())
.and(warp::body::json())
.and(headers_cloned())
.and(method())
.and(self.with_db(db))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_data::delete)
.with(cors)
.boxed()
}
/// GET /config?offset=3&limit=5
pub fn config(
&self,
db: DataDBs,
cloud: Cloud,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryConfigFilters>())
.and(headers_cloned())
.and(method())
// .and_then(user_authentication)
// .and(warp::header::optional::<String>("authorization"))
// .and(warp::header::optional::<String>("accept-language"))
.and(self.with_db(db))
.and(warp::any().map(move || cloud.to_owned()))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_config::config)
.with(cors)
.boxed()
}
/// GET /provision?offset=3&limit=5
pub fn provision(
&self,
db: DataDBs,
cloud: Cloud,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryConfigFilters>())
.and(headers_cloned())
.and(method())
// .and_then(user_authentication)
// .and(warp::header::optional::<String>("authorization"))
// .and(warp::header::optional::<String>("accept-language"))
.and(self.with_db(db))
.and(warp::any().map(move || cloud.to_owned()))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_config::provision)
.with(cors)
.boxed()
}
/// GET /status?offset=3&limit=5
pub fn status(
&self,
db: DataDBs,
cloud: Cloud,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryConfigFilters>())
.and(headers_cloned())
.and(method())
// .and_then(user_authentication)
// .and(warp::header::optional::<String>("authorization"))
// .and(warp::header::optional::<String>("accept-language"))
.and(self.with_db(db))
.and(warp::any().map(move || cloud.to_owned()))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_config::status)
.with(cors)
.boxed()
}
/// GET /status?offset=3&limit=5
pub fn liveness(
&self,
db: DataDBs,
cloud: Cloud,
path: &'static str,
cors: warp::cors::Builder,
//) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
) -> BoxedFilter<(impl warp::Reply,)> {
let prfx = self.prfx.to_owned();
warp::path(path)
.and(warp::get())
.and(warp::query::<KloudQueryConfigFilters>())
.and(headers_cloned())
.and(method())
// .and_then(user_authentication)
// .and(warp::header::optional::<String>("authorization"))
// .and(warp::header::optional::<String>("accept-language"))
.and(self.with_db(db))
.and(warp::any().map(move || cloud.to_owned()))
.and(warp::any().map(move || prfx.to_owned()))
.and_then(handlers::h_config::liveness)
.with(cors)
.boxed()
}
fn with_db(&self, db: DataDBs) -> impl Filter<Extract = (DataDBs,), Error = std::convert::Infallible> + Clone {
warp::any().map(move || db.clone())
}
// fn json_body(&self) -> impl Filter<Extract = (TopographicAnatomy,), Error = warp::Rejection> + Clone {
// // When accepting a body, we want a JSON body
// // (and to reject huge payloads)...
// warp::body::content_length_limit(1024 * 16).and(warp::body::json())
// }
}
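A wiring sketch for these filters (the address, prefix, and CORS policy are illustrative assumptions, not part of this commit):

// Hypothetical composition: with prfx = "klouds" this exposes
// /kloudsdata, /kloudsdefs, /kloudslang, /kloudsinsert and /kloudsdelete.
let cors = warp::cors().allow_any_origin().allow_methods(vec!["GET", "POST"]);
let routes = CollFilters::new("klouds").filters_data(db.clone(), cors);
warp::serve(routes).run(([127, 0, 0, 1], 8080)).await;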

src/graphql.rs (new file, 172 lines)

@@ -0,0 +1,172 @@
/*
//! Async-graphql integration with Warp
#![allow(clippy::type_complexity)]
#![forbid(unsafe_code)]
pub mod batch_request;
pub mod errors;
pub mod requests;
pub mod subscriptions;
pub mod filters;
pub use batch_request::{graphql_batch, graphql_batch_opts, BatchResponse};
pub use errors::BadRequest;
pub use requests::{graphql, graphql_opts, Response};
// pub use subscriptions::{graphql_subscription, graphql_subscription_with_data};
*/
use async_graphql::scalar;
use async_graphql::{EmptyMutation, EmptySubscription, MergedObject, Schema, Enum};
// use async_graphql_warp::{BadRequest, Response};
use async_graphql_warp::{Response};
use serde::{Serialize,Deserialize};
//use http::StatusCode;
use std::convert::Infallible;
use warp::{
filters::header::headers_cloned,
filters::method::method,
http::{method::Method, HeaderMap, HeaderValue},
// http::{header::AUTHORIZATION, method::Method, HeaderMap, HeaderValue},
// filters::path::{full, FullPath},
http::Response as HttpResponse,
Filter, filters::BoxedFilter
};
use gql_playground::{playground_source,GraphQLPlaygroundConfig};
//use async_graphql::http::{playground_source, GraphQLPlaygroundConfig};
//use crate::topographic_anatomy::graphql::{QueryRoot as TaQueryRoot};
// use crate::topographic_anatomy::{TopographicAnatomyQuery,TopographicAnatomyMutation}; // TopographicAnatomyChanged};
// use crate::tracking_point::{TrackingPointQuery,TrackingPointMutation}; // TrackingPointChanged};
// use zterton::auth::defs::{AuthStore};
// use zterton::auth::filters::{with_auth};
use crate::defs::{DataDBs};
use reqenv::ReqEnv;
#[derive(Enum, Eq, PartialEq, Copy, Clone)]
pub enum MutationType {
Created,
Updated,
Deleted,
}
impl Default for MutationType {
fn default() -> Self { MutationType::Updated }
}
pub type JsonMap = serde_json::Map<String, serde_json::Value>;
#[derive(MergedObject, Default)]
struct Query(
// TopographicAnatomyQuery,
// TrackingPointQuery,
);
#[derive(MergedObject, Default)]
struct Mutation(
// // TopographicAnatomyChanged,
// TopographicAnatomyMutation,
// // TrackingPointChanged,
// TrackingPointMutation,
);
#[derive(Clone, Default, Debug, Serialize, Deserialize)]
pub struct Map {
pub data: JsonMap,
}
scalar!(Map);
pub async fn graphiql(
path: &'static str,
giql_path: &'static str,
data_dbs: DataDBs,
) -> BoxedFilter<(impl warp::Reply,)> {
// ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
if giql_path.is_empty() {
warp::path(path)
.and(warp::get())
.map(move || {
HttpResponse::builder()
.header("content-type", "text/html")
.body("".into())
})
.boxed()
} else {
warp::path(path)
.and(warp::get())
.and(headers_cloned())
.and(method())
.and(with_db(data_dbs))
// TODO check auth here
.map(move |_headers: HeaderMap<HeaderValue>, _method: Method, _db: DataDBs| {
let gql_playground_config: GraphQLPlaygroundConfig = GraphQLPlaygroundConfig::new(giql_path);
HttpResponse::builder()
.header("content-type", "text/html")
.body(playground_source(gql_playground_config))
// .body(playground_source(GraphQLPlaygroundConfig::new("/gql")))
})
.boxed()
}
// graphql_playground
// .or(graphql_post)
// .recover(|err: Rejection| async move {
// if let Some(BadRequest(err)) = err.find() {
// return Ok::<_, Infallible>(warp::reply::with_status(
// err.to_string(),
// StatusCode::BAD_REQUEST,
// ));
// }
// Ok(warp::reply::with_status(
// "INTERNAL_SERVER_ERROR".to_string(),
// StatusCode::INTERNAL_SERVER_ERROR,
// ))
// })
}
pub async fn graphql(
// config: &Config,
path: &'static str,
data_dbs: DataDBs,
cors: warp::cors::Builder,
) -> BoxedFilter<(impl warp::Reply,)> {
// ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
let schema = Schema::build( Query::default(), Mutation::default(), EmptySubscription)
.data(data_dbs.clone())
.finish();
// warp::path!("gql").and(async_graphql_warp::graphql(schema)).and_then(
warp::path(path)
.and(headers_cloned())
.and(method())
.and(with_db(data_dbs))
.and(async_graphql_warp::graphql(schema)).and_then(
| headers: HeaderMap<HeaderValue>, method: Method, db: DataDBs,
(schema, request): (
Schema<Query, Mutation, EmptySubscription>,
async_graphql::Request,
)| async move {
let reqenv = ReqEnv::new(db.app, db.auth, headers, method, "/gql", "gql", "gql");
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
Ok::<_, Infallible>(Response::from(schema.execute(request).await))
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("gql:{} {}",&result,e);
let empty_schema = Schema::build( Query::default(), EmptyMutation, EmptySubscription)
.finish();
Ok::<_, Infallible>(Response::from(empty_schema.execute(request).await))
}
}
}
)
.with(cors)
.boxed()
}
fn with_db(db: DataDBs) -> impl Filter<Extract = (DataDBs,), Error = std::convert::Infallible> + Clone {
warp::any().map(move || db.clone())
}

src/handlers.rs (new file, 3 lines)

@@ -0,0 +1,3 @@
pub mod h_config;
pub mod h_data;
pub mod h_defs;

src/handlers/h_config.rs (new file, 564 lines)

@@ -0,0 +1,564 @@
// use std::convert::Infallible;
// use std::collections::HashMap;
use warp::{
// http::{StatusCode},
http::{method::Method, HeaderMap, HeaderValue},
Reply, Rejection,
};
use reqenv::ReqEnv;
// use app_env::profile::Profile;
// use app_env::config::Config;
use kloud::{
defs::{
KloudQueryConfigFilters,
},
};
use clds::clouds::defs::{Cloud};
use crate::defs::{DataDBs}; // ,CollsData};
use clds::clouds::on_clouds::{on_cloud_req,on_cloud_name_req};
pub async fn cloud_req(reqname: &str,cloud: &Cloud,reqenv: &ReqEnv,opts: &KloudQueryConfigFilters) -> String {
let result = if opts.grp.is_empty() {
    on_cloud_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&opts.cld).await
} else if opts.name.is_empty() {
    let source = format!("{}/{}",&opts.cld,&opts.grp);
    on_cloud_name_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&source).await
} else {
    let source = format!("{}/{}/{}",&opts.cld,&opts.grp,&opts.name);
    on_cloud_name_req(&reqname,&cloud,&reqenv,&opts.tsksrvcs,&opts.srvrs,&source).await
};
// Every request kind ("config", "status", "provision", "liveness") currently
// returns the payload unchanged, so no per-kind dispatch is needed here.
result
}
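The branches above select the request scope from the query: whole cloud, one group, or a single named target, with the source string mirroring the cld/grp/name hierarchy. A sketch of a group-scoped query (field names are taken from the usages above; deriving Default is an assumption):

// Illustrative only: target every server of group "web" in cloud "mycloud".
let opts = KloudQueryConfigFilters {
    cld: "mycloud".into(),
    grp: "web".into(),
    name: String::new(),
    tsksrvcs: "all".into(),
    srvrs: "all".into(),
    ..Default::default() // assumes the struct derives Default
};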
pub async fn config (
opts: KloudQueryConfigFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
cloud: Cloud,
prfx: String,
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
// dbg!("{:#?}",&db);
// dbg!("{:#?}",&header);
// dbg!("{:#?}",&opts);
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/config", "config", &prfx);
// get_pkgs_vers(&mut cloud).await?;
// cloud.env.listhosts = String::from(listhosts);
// if let Some(lang_str) = header.get("accept-language") {
// println!("{:?}",&lang_str);
// }
// println!("User: {} | {}",&user.user_id,&user.token);
// println!("LANG: {}",language);
// if prfx.as_str() == "ta" {
// let cur = db.colls.ta.entries.read();
// }
// let allow_origin = reqenv.config().allow_origin;
// let result = cloud_req("config",&cloud,&reqenv,&opts).await;
// println!("Result: {}",&result);
// return Ok(warp::http::Response::builder()
// .body(result.to_string())
// .into_response())
// ;
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
match prfx.as_str() {
"kloud" => {
let result = cloud_req("config",&cloud,&reqenv,&opts).await;
// println!("Result: {}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
},
"ta" => {
/* let data = CollsData::get_ta_entries(db.colls.clone()).await;
let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
_ => {
//let result="";
let result = format!("list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
}
pub async fn provision (
opts: KloudQueryConfigFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
cloud: Cloud,
prfx: String,
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
// dbg!("{:#?}",&db);
// dbg!("{:#?}",&header);
// dbg!("{:#?}",&opts);
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/provision", "provision", &prfx);
// if let Some(lang_str) = header.get("accept-language") {
// println!("{:?}",&lang_str);
// }
// println!("User: {} | {}",&user.user_id,&user.token);
// println!("LANG: {}",language);
// if prfx.as_str() == "ta" {
// let cur = db.colls.ta.entries.read();
// }
// let allow_origin = reqenv.config().allow_origin;
// let test = true;
// if test == true {
let result = cloud_req("provision",&cloud,&reqenv,&opts).await;
println!("Result: {}",&result);
// return Ok(warp::http::Response::builder()
// .body(result.to_string())
// .into_response());
// } else {
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
match prfx.as_str() {
"kloud" => {
let result = cloud_req("provision",&cloud,&reqenv,&opts).await;
println!("Result: {}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
},
"ta" => {
/* let data = CollsData::get_ta_entries(db.colls.clone()).await;
let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
_ => {
//let result="";
let result = format!("list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
// }
}
pub async fn status (
opts: KloudQueryConfigFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
cloud: Cloud,
prfx: String,
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
// dbg!("{:#?}",&db);
// dbg!("{:#?}",&header);
// dbg!("{:#?}",&opts);
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/status", "status", &prfx);
// if let Some(lang_str) = header.get("accept-language") {
// println!("{:?}",&lang_str);
// }
// println!("User: {} | {}",&user.user_id,&user.token);
// println!("LANG: {}",language);
// if prfx.as_str() == "ta" {
// let cur = db.colls.ta.entries.read();
// }
// let allow_origin = reqenv.config().allow_origin;
// let result = cloud_req("status",&cloud,&reqenv,&opts).await;
// println!("Result: {}",&result);
// return Ok(warp::http::Response::builder()
// .body(result.to_string())
// .into_response());
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
match prfx.as_str() {
"kloud" => {
let result = cloud_req("status",&cloud,&reqenv,&opts).await;
println!("Result: {}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
},
"ta" => {
/* let data = CollsData::get_ta_entries(db.colls.clone()).await;
let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
_ => {
//let result="";
let result = format!("list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
}
pub async fn liveness (
opts: KloudQueryConfigFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
cloud: Cloud,
prfx: String,
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
// dbg!("{:#?}",&db);
// dbg!("{:#?}",&header);
// dbg!("{:#?}",&opts);
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/liveness", "liveness", &prfx);
// if let Some(lang_str) = header.get("accept-language") {
// println!("{:?}",&lang_str);
// }
// println!("User: {} | {}",&user.user_id,&user.token);
// println!("LANG: {}",language);
// if prfx.as_str() == "ta" {
// let cur = db.colls.ta.entries.read();
// }
// let allow_origin = reqenv.config().allow_origin;
// let test = true;
// if test == true {
// let result = cloud_req("liveness",&cloud,&reqenv,&opts).await;
// println!("Result: {}",&result);
// return Ok(warp::http::Response::builder()
// .body(result.to_string())
// .into_response());
// } else {
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
match prfx.as_str() {
"kloud" => {
let result = cloud_req("liveness",&cloud,&reqenv,&opts).await;
println!("Result: {}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
},
"ta" => {
/* let data = CollsData::get_ta_entries(db.colls.clone()).await;
let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
_ => {
//let result="";
let result = format!("list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
// }
}

src/handlers/h_data.rs (new file, 442 lines)

@@ -0,0 +1,442 @@
// use std::convert::Infallible;
// use std::collections::HashMap;
use warp::{
// http::{StatusCode},
http::{method::Method, HeaderMap, HeaderValue},
Reply, Rejection,
};
use reqenv::ReqEnv;
/*
use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
//use zterton::models::{AppStore, AppData};
use crate::defs::AppDB;
use crate::auth::defs::{
AuthError,
Sessions,
UserCtx,
UserMap,
WebResult,
SharedEnforcer,
BEARER_PREFIX,
custom_reject
};
*/
// use ::topographic_anatomy::{TopographicAnatomy};
// use ::tracking_point::{TrackingPoint};
// use crate::tracking_point::defs::{TrackingPoint};
//use app_auth::{UserCtx};
use kloud::{
defs::{
KloudQueryFilters,
KloudReqData,
},
};
use crate::defs::{DataDBs}; // ,CollsData};
/*
|method: http::Method, path: warp::path::FullPath, headers: http::HeaderMap, body| {
let mut reqenv = http::Request::builder()
.method(method)
.uri(path.as_str())
.body(body)
.expect("request builder");
{ *reqenv.headers_mut() = headers; }
req
*/
/*
pub async fn table(
opts: KloudQueryFilters,
header: HeaderMap<HeaderValue>,
method: Method,
db: DataDBs,
prfx: String,
) -> Result<impl Reply, Rejection> {
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "", "table", &prfx);
let lang = reqenv.lang();
let mut ctx = reqenv.ctx();
let req_context = "";
let app_ctx: &str;
if ! req_context.is_empty() && req_context != reqenv.config().default_module.as_str() {
app_ctx = req_context;
} else {
app_ctx = "";
}
let lang_items = serde_json::to_string(&LangItems::new("langs/ta","es","yaml"))
.unwrap_or_else(|_| String::from(""));
let mut data_hash: HashMap<String, String> = HashMap::new();
data_hash.insert("lang".to_string(), lang.to_owned());
data_hash.insert("lang_txt".to_string(), lang_items.to_owned());
// let allow_origin = reqenv.config().allow_origin;
match reqenv.user_authentication().await {
Ok(auth) => {
dbg!("auth: {}",&auth);
// let mut res = String::from("");
// let res = if let Some(name) = query.get("name") {
// submitted form
match reqenv.req
.render_page(
&mut ctx,
reqenv.config().templates_path.as_str(),
"ta_table/index.html",
"index.html",
format!("ta_table/{}.toml", lang.to_owned())
.to_owned()
.as_str(),
&mut data_hash,
app_ctx,
)
.await {
Ok(page) =>
Ok(warp::http::Response::builder()
.body(page)
.into_response()),
/*
Ok(warp::reply::with_header(
warp::http::Response::new(page),
"Access-Control-Allow-Origin",
&allow_origin)),
*/
Err(err) =>
Ok(warp::http::Response::builder()
.body(err.to_string())
.into_response()),
/*
Ok(warp::reply::with_header(
warp::http::Response::new(err.to_string()),
"Access-Control-Allow-Origin",
&allow_origin)),
*/
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
}
*/
pub async fn list(
_opts: KloudQueryFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
prfx: String,
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
// dbg!("{:#?}",&db);
// dbg!("{:#?}",&header);
// dbg!("{:#?}",&opts);
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/list", "list", &prfx);
// if let Some(lang_str) = header.get("accept-language") {
// println!("{:?}",&lang_str);
// }
// println!("User: {} | {}",&user.user_id,&user.token);
// println!("LANG: {}",language);
// if prfx.as_str() == "ta" {
// let cur = db.colls.ta.entries.read();
// }
// let allow_origin = reqenv.config().allow_origin;
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
match prfx.as_str() {
"ta" => {
/* let data = CollsData::get_ta_entries(db.colls.clone()).await;
let data_out: Vec<TopographicAnatomy> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_ta_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
/* let data = CollsData::get_tp_entries(db.colls.clone()).await;
let data_out: Vec<TrackingPoint> = data.iter().enumerate().filter(|(idx,(itm,_))|{
let mut skip = false;
if !opts.id.is_empty() && !itm.contains(&opts.id) { skip = true; }
if opts.start > 0 && idx < &(opts.start as usize) { skip = true; }
if opts.end > 0 && idx > &(opts.end as usize) { skip = true; }
!skip
}).map(|(_,(_,value))| value).cloned().collect();
*/
let data_out: Vec<String> = Vec::new();
let result = serde_json::to_string(&data_out).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&CollsData::get_tp_entries(db.colls.clone()).await),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
_ => {
//let result="";
let result = format!("list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
// warp::http::Response::new(body),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
// warp::reply::json(&""),
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
},
}
/*
let db_appdata = get_db_appdata().await;
let reqtasks = ReqEnv::new(&db_appdata, req, "data");
// if init at request
//if let Err(e) = set_ta_data().await {
if set_ta_data(&reqtasks.env()).await.is_err() {
res.render_html_text("Error");
}
*/
// warp::reply::json(&result),
// let body: String = "".to_string();
// Ok(warp::reply::with_header(
// warp::reply::json(&result),
// // warp::http::Response::new(body),
// "Access-Control-Allow-Origin",
// &allow_origin))
// Ok(warp::reply::json(&result))
}
// warp::generic::Either<(std::string::String,), (std::string::String,)>
// pub async fn create(
// create: TopographicAnatomy,
// db: DataDBs,
// prfx: String,
// ) -> Result<impl Reply, Rejection> {
// /*
// log::debug!("create_ta: {:?}", create);
// let mut vec = db.ta.lock().await;
// for ta in vec.iter() {
// if ta.id == create.id {
// log::debug!(" -> id already exists: {}", create.id);
// // ta with id already exists, return `400 BadRequest`.
// return Ok(StatusCode::BAD_REQUEST);
// }
// }
// // No existing ta with id, so insert and return `201 Created`.
// vec.push(create);
// */
// Ok(StatusCode::CREATED)
// }
pub async fn insert(
_data: KloudReqData,
header: HeaderMap<HeaderValue>,
method: Method,
db: DataDBs,
prfx: String,
) -> Result<impl Reply, Rejection> {
#[allow(unused_mut)]
let mut result = String::from("");
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/insert", "insert", &prfx);
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
/*
match prfx.as_str() {
"ta" => {
match serde_json::from_str::<TopographicAnatomy>(&data.str_data) {
Ok(item) => {
if db.colls.ta.entries.read().contains_key(&data.id) {
if data.id != item.id {
db.colls.ta.entries.write().remove(&data.id);
println!("DELETE topographic_anatomy id: {}",&data.id);
}
db.colls.ta.entries.write().insert(item.id.to_owned(), item.to_owned());
println!("UPDATED topographic_anatomy id: {}",&item.id);
result = format!("UPDATED: {}",&item.id);
} else {
db.colls.ta.entries.write().insert(item.id.to_owned(), item.to_owned());
println!("CREATED topographic_anatomy id: {}",&item.id);
result = format!("CREATED: {}",&item.id);
}
// TODO create a job
/*
let ta_data = CollsData::get_ta_entries(db.colls.clone()).await;
let ta_data_dump = serde_yaml::to_string(&ta_data).unwrap_or_else(|_| String::from(""));
if ta_data_dump != "" {
let module = reqenv.module();
let ta_path = format!("{}/{}.{}",&module.store_root,&module.store_path,&module.store_frmt);
let _fs_res = tokio::fs::write(&ta_path, ta_data_dump).await.map_err(|e| {
eprint!("error writing file: {}", e);
});
}
*/
},
Err(e) => {
println!("Error parse insert topographic_anatomy: {}",e);
result = format!("Error parse ta: {}",&e);
}
}
},
"tp" => {
// let result = CollsData::get_tp_entries(db.colls.clone()).await;
match serde_json::from_str::<TrackingPoint>(&data.str_data) {
Ok(item) => {
if db.colls.tp.entries.read().contains_key(&data.id) {
db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
println!("UPDATED tracking_point id: {}",&item.id);
result = format!("UPDATED: {}",&item.id);
} else {
db.colls.tp.entries.write().insert(item.id.to_owned(), item.to_owned());
println!("CREATED tracking_point id: {}",&item.id);
result = format!("CREATED: {}",&item.id);
}
},
Err(e) => {
println!("Error {} parse insert : {}",&prfx,e);
result = format!("ERROR: {} parse insert: '{}'",&prfx,e);
}
}
},
_ => {
result = format!("ERROR: list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
}
};
*/
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}: {}",&result,e);
},
}
let json_res = format!("{{ \"res\": \"{}\"}}", result);
Ok(warp::http::Response::builder()
.body(json_res.to_string())
.into_response())
// If the for loop didn't return OK, then the ID doesn't exist...
// Ok(StatusCode::NOT_FOUND)
}
pub async fn delete(
data: KloudReqData,
header: HeaderMap<HeaderValue>,
method: Method,
db: DataDBs,
prfx: String,
) -> Result<impl Reply, Rejection> {
if data.id.is_empty() {
return Ok(warp::http::Response::builder()
.body("ERROR: no data".to_string())
.into_response());
}
let result: String;
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/delete", "delete", &prfx);
match reqenv.user_authentication().await {
Ok(_auth) => {
result = String::from("DONE");
// dbg!("auth: {}",&auth);
/*
match prfx.as_str() {
"ta" => {
if db.colls.ta.entries.read().contains_key(&data.id) {
db.colls.ta.entries.write().remove(&data.id);
if db.colls.ta.entries.read().contains_key(&data.id) {
println!("NOT DELETED topographic_anatomy id: {}",&data.id);
result = format!("ERROR: topographic_anatomy NOT deleted: '{}'",&data.id);
} else {
println!("DELETED topographic_anatomy id: {}",&data.id);
result = format!("DELETED: topographic_anatomy: '{}'",&data.id);
}
} else {
result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
}
},
"tp" => {
if db.colls.tp.entries.read().contains_key(&data.id) {
db.colls.tp.entries.write().remove(&data.id);
if db.colls.tp.entries.read().contains_key(&data.id) {
println!("NOT DELETED tracking_point id: {}",&data.id);
result = format!("ERROR: tracking_point NOT deleted: '{}'",&data.id);
} else {
println!("DELETED tracking_point id: {}",&data.id);
result = format!("DELETED: tracking_point: '{}'",&data.id);
}
} else {
result = format!("ERROR: topographic_anatomy NOT found: '{}'",&data.id);
}
},
_ => {
result = format!("ERROR: list for '{}' undefined",&prfx);
// (AuthError::UserNotFoundError.to_string())
println!("{}",&result);
}
};
*/
},
Err(e) => {
result = format!("Error: no credentials found");
println!("{}",e);
},
}
let json_res = format!("{{ \"res\": \"{}\"}}", result);
Ok(warp::http::Response::builder()
.body(json_res.to_string())
.into_response())
}
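Both insert and delete reply with the same one-field JSON envelope built by hand above; a client-side decode sketch (the struct name is illustrative):

#[derive(serde::Deserialize)]
struct OpResponse {
    res: String, // e.g. "DONE", "CREATED: <id>", "ERROR: ..."
}
// let parsed: OpResponse = serde_json::from_str(&body_text)?;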

src/handlers/h_defs.rs (new file, 162 lines)

@@ -0,0 +1,162 @@
// use std::convert::Infallible;
// use std::collections::HashMap;
use warp::{
// http::{StatusCode},
http::{method::Method, HeaderMap, HeaderValue},
Reply, Rejection,
};
use reqenv::ReqEnv;
use app_env::profile::Profile;
/*
use crate::topographic_anatomy::defs::{TaStore,TaData,TaQueryFilters,TopographicAnatomy};
use zterton::kloud::utils::{lng_t,get_lang_items_str,load_lang};
//use zterton::models::{AppStore, AppData};
use crate::defs::AppDB;
use crate::auth::defs::{
AuthError,
Sessions,
UserCtx,
UserMap,
WebResult,
SharedEnforcer,
BEARER_PREFIX,
custom_reject
};
*/
// use ::topographic_anatomy::{TopographicAnatomy};
// use ::tracking_point::{TrackingPoint};
// use crate::tracking_point::defs::{TrackingPoint};
//use app_auth::{UserCtx};
use kloud::{
defs::{
KloudQueryDefsFilters,
KloudQueryLangFilters,
},
lang::LangItems,
};
use crate::defs::{DataDBs}; // ,CollsData};
// warp::generic::Either<(std::string::String,), (std::string::String,)>
pub async fn langs(
opts: KloudQueryLangFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
prfx: String,
// headers: warp::http::HeaderMap
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/langs", "langs", &prfx);
// println!("User: {} | {}",&user.user_id,&user.token);
// if let Some(lang) = reqtasks.params().get("lang") {
// res.render_json_text(&get_lang_items_str("langs",req_lang,"yaml"))
// } else {
// res.render_json_text(&get_lang_items_str("langs",&reqtasks.lang(),"yaml"))
// }
// log::debug!("LANG: {} - {}",language, lang);
// dbg!("LANG: {} - {}",language, lang);
// let allow_origin = reqenv.config().allow_origin;
match reqenv.user_authentication().await {
Ok(_auth) => {
// dbg!("auth: {}",&auth);
let lang = opts.lang.unwrap_or_else(|| String::from("es"));
let section = opts.section.unwrap_or_else(|| String::from(""));
let lang_items = LangItems::new("langs/ta",&lang,"yaml");
let result = lang_items.get_items_str(&section);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
// Ok(warp::reply::json(&result))
*/
},
Err(e) => {
let result = format!("Error: no credentials found");
println!("{}",e);
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
}
}
}
// warp::generic::Either<(std::string::String,), (std::string::String,)>
pub async fn defs(
_opts: KloudQueryDefsFilters,
header: HeaderMap<HeaderValue>,
method: Method,
// user: UserCtx,
db: DataDBs,
prfx: String,
// headers: warp::http::HeaderMap
// path: warp::path::FullPath, headers: warp::http::HeaderMap
) -> Result<impl Reply, Rejection> {
let reqenv = ReqEnv::new(db.app, db.auth, header, method, "/defs", "defs", &prfx);
// let allow_origin = reqenv.config().allow_origin;
match reqenv.user_authentication().await {
Ok(auth) => {
// dbg!("auth: {}",&auth);
// println!("User: {} | {}",&user.user_id,&user.token);
// if let Some(lang) = reqtasks.params().get("lang") {
// res.render_json_text(&get_lang_items_str("langs",req_lang,"yaml"))
// } else {
// res.render_json_text(&get_lang_items_str("langs",&reqtasks.lang(),"yaml"))
// }
// log::debug!("LANG: {} - {}",language, lang);
// dbg!("LANG: {} - {}",language, lang);
let mut path = format!("{}/profiles/{}/{}/defs.yaml",reqenv.config().resources_path,&prfx,&auth.user_id);
if ! std::path::Path::new(&path).exists() {
path = format!("{}/profiles/{}/defs.yaml",reqenv.config().resources_path,&prfx);
}
let content = Profile::load_fs_content(path.into());
// let lang = opts.lang.unwrap_or_else(|| String::from("es"));
// let section = opts.section.unwrap_or_else(|| String::from(""));
// let lang_items = LangItems::new("langs/ta",&lang,"yaml");
// let result = lang_items.get_items_str(&section);
let res = Profile::to_yaml(content); // String::from("");
let result = serde_json::to_string(&res).unwrap_or_else(|_| String::from(""));
Ok(warp::http::Response::builder()
.body(result.to_string())
.into_response())
/*
Ok(warp::reply::with_header(
warp::http::Response::new(result),
"Access-Control-Allow-Origin",
&allow_origin))
*/
// warp::reply::json(&res),
// Ok(warp::reply::with_header(
// warp::http::Response::new(result),
// "Access-Control-Allow-Origin",
// &allow_origin))
// Ok(warp::reply::json(&result))
},
Err(e) => {
let result = String::from("Error: no credentials found");
eprintln!("{}", e);
Ok(warp::http::Response::builder()
.body(result)
.into_response())
}
}
}
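// A hedged usage sketch (route prefix, port and token handling are
// assumptions, not confirmed by this commit): with the server running
// locally, something like
//   curl -H "Authorization: Bearer <token>" "http://localhost:8080/kloud/defs"
// should return the profile definitions serialized as JSON.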

95
src/klouds.rs Normal file
View File

@ -0,0 +1,95 @@
use async_graphql::{Context, Object, Result};
use crate::defs::DataDBs;
use crate::graphql::{Map,MutationType};
use librecloud_kloud::Kloud;
use crate::filters::CollFilters;
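/// Builds the REST collection filters for the `klouds` collection; the
/// prefix becomes part of the route path handled by `CollFilters`.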
pub fn load_klouds_filters() -> CollFilters {
CollFilters {
prfx: String::from("klouds"),
}
}
#[derive(Clone,Default)]
pub struct KloudMainQuery;
#[Object]
impl KloudMainQuery {
async fn klouds(&self, ctx: &Context<'_>) -> Vec<Kloud> {
let data_dbs = ctx.data_unchecked::<DataDBs>();
let cur = data_dbs.colls.klouds.entries.read();
// Clone all stored Kloud entries out of the read guard
cur.values().cloned().collect()
}
}
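// A hedged GraphQL usage sketch (the selected sub-fields depend on the Kloud
// definition, which lives in another crate, so treat them as placeholders):
//   query { klouds { /* fields of Kloud */ } }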
#[derive(Clone,Default)]
pub struct KloudMainChanged {
pub mutation_type: MutationType,
pub id: String,
pub objmap: Map,
}
#[Object]
impl KloudMainChanged {
async fn mutation_type(&self) -> MutationType {
self.mutation_type
}
async fn id(&self) -> &String {
&self.id
}
async fn objmap(&self) -> &Map {
&self.objmap
}
async fn klouds(&self, ctx: &Context<'_>) -> Result<Option<Kloud>> {
let data_dbs = ctx.data_unchecked::<DataDBs>();
// TODO Parse self.objmap
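// Until objmap parsing lands, Created and Updated both insert a default
// Kloud under the given id; Deleted removes the entry.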
match self.mutation_type {
MutationType::Created =>
data_dbs.colls.klouds.entries.write()
.insert(self.id.to_owned(), Kloud::default()),
MutationType::Updated =>
data_dbs.colls.klouds.entries.write()
.insert(self.id.to_owned(), Kloud::default()),
MutationType::Deleted =>
data_dbs.colls.klouds.entries.write().remove(&self.id),
};
// TODO: look up and return the affected entry instead of a default value
Ok(Some(Kloud::default()))
}
}
/*
https://github.com/async-graphql/examples/blob/b36b5c44543b7323cb199ed229ea247e83b85d18/models/books/src/lib.rs
https://blog.logrocket.com/creating-a-rest-api-in-rust-with-warp/
store.grocery_list.write().insert(item.name, item.quantity);
Ok(warp::reply::with_status(
"Added items to the grocery list",
http::StatusCode::CREATED,
))
store.grocery_list.write().remove(&id.name);
Ok(warp::reply::with_status(
"Removed item from grocery list",
http::StatusCode::OK,
))
*/

349
src/main.rs Normal file
View File

@ -0,0 +1,349 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use app_env::{
AppStore,
appenv::AppEnv,
appinfo::AppInfo,
appdata::AppData,
config::Config,
};
use app_auth::AuthStore;
use reject_filters::{handle_rejection};
// use zterton::models::{Terton};
//use std::fs; //, io};
// use std::fs::OpenOptions;
// use std::io::Write;
//use std::path::Path;
// use serde_yaml::Value;
use anyhow::{Result};
// use std::env;
//use warp::{http::Response as HttpResponse, Filter, filters::BoxedFilter};
use warp::{
// http::{StatusCode},
http::{method::Method, HeaderMap},
Filter,
};
// use warp::filters::header::headers_cloned;
// use warp::path::FullPath;
// use warp::http::{Uri, HeaderMap, HeaderValue};
//use crate::utils::set_ta_data;
use crate::defs::{DataDBs,CollsData,load_cloud_env};
use clds::clouds::defs::{
Cloud,
};
// use clds::app_env::config::Config;
use clds::clouds::on_clouds::{make_cloud_cache,run_clouds_check};
use reqenv::ReqEnv;
// #[macro_use]
// extern crate kloud_entries_macro_derive;
static WEBSERVER: AtomicUsize = AtomicUsize::new(0);
const VERSION: &str = env!("CARGO_PKG_VERSION");
// const VERSION: Option<&str> = option_env!("CARGO_PKG_VERSION");
const AUTHORS: &str = env!("CARGO_PKG_AUTHORS");
pub mod defs;
pub mod graphql;
pub mod filters;
pub mod handlers;
// pub const MODEL_PATH: &String = String::from("./auth/auth_model.conf");
// pub const POLICY_PATH: &String = String::from("./auth/policy.csv");
async fn create_auth_store(app_env: &AppEnv,verbose: &str) -> AuthStore {
let model_path = app_env.config.st_auth_model_path();
let policy_path = app_env.config.st_auth_policy_path();
AuthStore::new(&app_env.config, AuthStore::create_enforcer(model_path, policy_path).await, verbose)
}
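/// Boots the Zterton web stack once per process: the WEBSERVER atomic acts as
/// a guard so repeated calls from the supervision loop in `main` return early
/// instead of binding the socket twice.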
async fn up_web_server() -> Result<()> {
let webserver_status = WEBSERVER.load(Ordering::Relaxed);
let zterton_env = envmnt::get_or("ZTERTON", "UNKNOWN");
let verbose = envmnt::get_or("WEB_SERVER_VERBOSE", "");
if webserver_status != 0 {
if verbose != "quiet" {
println!("ZTerton web services at {}",&zterton_env);
}
// envmnt::set("ZTERTON", "WEBSERVER");
return Ok(());
/*
Alternative: probe the already-running server instead of relying on the flag:
let app = Zterton::new(
    app_env.config.srv_protocol.to_owned(),
    app_env.config.srv_host.to_owned(),
    app_env.config.srv_port,
);
let serverstring = format!("{}:{}", &app.host, &app.port);
match std::net::TcpStream::connect(&serverstring) {
    Ok(_stream) => Ok(()),
    Err(e) => Err(anyhow!("Connection to '{}' failed: {}", &serverstring, &e)),
}
*/
}
WEBSERVER.store(1,Ordering::Relaxed);
let mut app_env = AppEnv::default();
app_env.info = AppInfo::new(
"Zterton",
format!("version: {}",VERSION),
format!("Authors: {}",AUTHORS),
).await;
println!("Web services: init {} ___________ ", chrono::Utc::now().timestamp());
zterton::init_app(&mut app_env,"").await?;
// TODO pass root file-name format from AppEnv Config
// if init at load
// set_ta_data(&app_env).await?;
let (app, socket) = zterton::start_web(&mut app_env).await;
println!("Load app store ...");
let app_store = AppStore::new(AppData::new(app_env.to_owned()));
// Auth store backed by a static casbin enforcer
println!("Load auth store ...");
let auth_store = create_auth_store(&app_env,"").await;
// dbg!(&auth_store.users.read().await);
// dbg!(&auth_store.shadows.read().await);
println!("Load data store ...");
let data_dbs = DataDBs {
colls: CollsData::new(app_env.to_owned()),
app: app_store.to_owned(),
auth: auth_store.to_owned(),
};
println!("Load web filters ...");
// let store = warp::any().map(move || ta_store.clone());
//let routes = warp::any().map(|| "Hello, World!");
// let us get some static boxes from config values:
let log_name = app_env.config.st_log_name();
// Path for static files
let html_path = app_env.config.st_html_path();
// If GraphQL is not needed, comment out or remove the next line
let gql_path = app_env.config.st_gql_req_path();
// If GraphiQL is not needed, comment out or remove the next line (GiQL interface)
let giql_path = app_env.config.st_giql_req_path();
let origins: Vec<&str> = app_env.config.allow_origin.iter().map(AsRef::as_ref).collect();
let cors = warp::cors()
//.allow_any_origin()
.allow_origins(origins)
//.allow_origins(vec![app_env.config.allow_origin.as_str(), "https://localhost:8000"])
.allow_credentials(true)
.allow_header("content-type")
.allow_header("Authorization")
.allow_methods(&[Method::GET, Method::POST, Method::DELETE]);
let auth_api =
// Auth routes for login & logout REQUIRED
app_auth_filters::auth(app_store.clone(),auth_store.clone(),cors.clone()); // .with(cors.clone());
let gqli_api =
// The GraphiQL interface must be registered BEFORE the graphql POST route with the schema
graphql::graphiql(gql_path, giql_path, data_dbs.clone()).await;
if !giql_path.is_empty() {
println!(
"GraphiQL url: {}://{}:{}/{}",
&app.protocol, &app.host, &app.port, &giql_path
);
}
let mut cloud = Cloud::default();
load_cloud_env(&mut cloud).await;
// If GraphQL is not needed, comment out or remove the next line
let gql_api = graphql::graphql(gql_path, data_dbs.clone(), cors.clone()).await; //.with(cors.clone());
// Add ALL ENTITIES to work with here
let kloud_api = filters::CollFilters::new("kloud")
.filters_config(data_dbs.clone(),cloud.clone(),cors.clone());
// let ta_api =
// filters::CollFilters::new("ta").filters(&app_env.config, data_dbs.clone(),cors.clone());
// let tp_api = filters::CollFilters::new("tp").filters(&app_env.config, data_dbs.clone(),cors.clone());
// .or(tracking_point::load_tp_filters().filters(&app_env.config, data_dbs.clone()))
// .or(topographic_anatomy::filters::ta(&app_env.config, data_dbs.clone()))
// .or(tracking_point::filters::tp(&app_env.config, data_dbs.clone()))
let file_api = app_file_filters::files(app_store.clone(),auth_store.clone()).with(cors.clone());
// Path for static files, better to be LAST
let fs_api = warp::fs::dir(html_path).with(warp::compression::gzip());
// Recover and handle errors
let app_api = auth_api
.or(gqli_api).or(gql_api)
.or(kloud_api)
// .or(ta_api)
// .or(tp_api)
.or(file_api)
.or(fs_api)
.recover(move | error: warp::Rejection| handle_rejection(error, app_store.clone()))
.boxed();
// Wrap routes with log to get info
let routes = app_api.with(warp::log(log_name));
// let routes = app_api.with(cors).with(warp::log(log_name));
println!(
"Starting http server: {}://{}:{}",
&app.protocol, &app.host, &app.port
);
envmnt::set("ZTERTON", format!("{}:{}",&app.host,&app.port));
println!("Web services: done {} __________ ",chrono::Utc::now().timestamp());
if app.protocol.as_str() == "http" {
warp::serve(routes.to_owned())
.run(socket)
.await;
} else {
let cert_pem = format!("{}/ssl/{}", app_env.config.resources_path, "cert.pem");
let key_pem = format!("{}/ssl/{}", &app_env.config.resources_path, "key.pem");
warp::serve(routes)
.tls()
.cert_path(cert_pem)
.key_path(key_pem)
.run(socket)
.await;
}
Ok(())
}
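/// Cache task entry point: reads the -c/-e CLI arguments, loads the cloud
/// environment and config, and refreshes the cloud cache via `make_cloud_cache`.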
pub async fn run_cache_clouds() {
let args: Vec<String> = std::env::args().collect();
let mut arg_cfg_path = String::from("");
let mut arg_env_path = String::from("");
args.iter().enumerate().for_each(|(idx,arg)| {
if arg == "-c" {
arg_cfg_path=args[idx+1].to_owned();
} else if arg == "-e" {
arg_env_path=args[idx+1].to_owned();
}
});
println!("Cache service on Clouds: run {} __________ {} / {} ",chrono::Utc::now().timestamp(),&arg_cfg_path,&arg_env_path);
let mut cloud = Cloud::default();
load_cloud_env(&mut cloud).await;
let mut app_env = AppEnv::default();
let config_content = Config::load_file_content("quiet", &arg_cfg_path);
if !config_content.contains("run_mode") {
return;
}
app_env.config = Config::new(config_content,"quiet");
let app_store = AppStore::new(AppData::new(app_env.to_owned()));
let auth_store = create_auth_store(&app_env,"quiet").await;
let mut headers = HeaderMap::new();
headers.insert(http::header::HOST, "localhost".parse().unwrap());
let reqenv = ReqEnv::new(
app_store, auth_store,
headers,
Method::GET,
"/config", "config", "kloud"
);
let _ = make_cloud_cache(&reqenv,&cloud).await;
println!("Cache service on Clouds: done {} __________ ",chrono::Utc::now().timestamp());
}
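/// Check task entry point: same setup as `run_cache_clouds`, but runs the
/// cloud availability checks via `run_clouds_check`.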
pub async fn run_check_clouds() {
let args: Vec<String> = std::env::args().collect();
let mut arg_cfg_path = String::from("");
let mut arg_env_path = String::from("");
args.iter().enumerate().for_each(|(idx,arg)| {
if arg == "-c" {
arg_cfg_path=args[idx+1].to_owned();
} else if arg == "-e" {
arg_env_path=args[idx+1].to_owned();
}
});
println!("Check Cloud services: run {} __________ {} / {} ",chrono::Utc::now().timestamp(),&arg_cfg_path,&arg_env_path);
let mut cloud = Cloud::default();
load_cloud_env(&mut cloud).await;
let mut app_env = AppEnv::default();
let config_content = Config::load_file_content("quiet",&arg_cfg_path);
if !config_content.contains("run_mode") {
return;
}
app_env.config = Config::new(config_content,"quiet");
let app_store = AppStore::new(AppData::new(app_env.to_owned()));
let auth_store = create_auth_store(&app_env,"quiet").await;
let mut headers = HeaderMap::new();
headers.insert(http::header::HOST, "localhost".parse().unwrap());
let reqenv = ReqEnv::new(
app_store, auth_store,
headers,
Method::GET,
"/config", "config", "kloud"
);
let _ = run_clouds_check(&reqenv,&cloud).await;
println!("Check Cloud service: done {} __________ ",chrono::Utc::now().timestamp());
}
// for standalone server & async use
// #[tokio::main]
// pub async fn main() -> Result<()> {
pub fn main() -> Result<()> {
let args: Vec<String> = std::env::args().collect();
// println!("I got {:?} arguments: {:?}.", args.len() - 1, &args[1..]);
if args.len() > 1 && (args[1] == "-h" || args[1] == "--help") {
println!("{} USAGE: -c config-toml -e env.file", &args[0]);
return Ok(());
}
let mut arg_cfg_path = String::from("");
let mut arg_env_path = String::from("");
args.iter().enumerate().for_each(|(idx,arg)| {
if arg == "-c" {
arg_cfg_path=args[idx+1].to_owned();
} else if arg == "-e" {
arg_env_path=args[idx+1].to_owned();
}
});
// assert!(output.is_ok());
let loop_duration: u64;
let run_cache: bool;
let run_check: bool;
{
let config_content = Config::load_file_content("quiet", &arg_cfg_path);
if config_content.contains("run_mode") {
let config = Config::new(config_content,"quiet");
loop_duration = config.loop_duration;
// loop_duration = 10;
run_cache = config.run_cache;
run_check = config.run_check;
if run_cache {
println!("Running 'cloud_cache' every {} seconds in LOOP",&loop_duration);
}
if run_check {
println!("Running 'cloud_check' every {} seconds in LOOP",&loop_duration);
}
} else {
loop_duration = 0;
run_cache = false;
run_check = false;
}
}
// println!("content: {}",&config_content);
let rt = tokio::runtime::Runtime::new().expect("Error creating tokio runtime");
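// Supervision loop: (re)spawn the web server, then kick off the optional
// check/cache tasks, sleeping loop_duration seconds between cycles.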
loop {
rt.block_on(async move {
tokio::spawn(async move {up_web_server().await });
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
if run_check {
tokio::spawn(async {run_check_clouds().await }); // For async task
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
}
// {
// // For blocking task:
// let join_handle = tokio::task::spawn_blocking(|| do_my_task());
// tokio::spawn(async { do_my_task().await; });
// join_handle.await; // TODO: should handle error here
// }
if run_cache {
tokio::spawn(async {run_cache_clouds().await }); // For async task
}
println!("LOOP: {} __________",chrono::Utc::now().timestamp());
tokio::time::sleep(tokio::time::Duration::from_secs(loop_duration)).await;
});
}
}
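// A possible refactor (a sketch, not part of this commit): the -c/-e argument
// scan above is repeated in run_cache_clouds, run_check_clouds and main, and
// could be hoisted into a helper such as:
//
// fn parse_cfg_env_args(args: &[String]) -> (String, String) {
//     let mut cfg = String::new();
//     let mut env = String::new();
//     args.iter().enumerate().for_each(|(idx, arg)| {
//         if arg == "-c" {
//             cfg = args[idx + 1].to_owned();
//         } else if arg == "-e" {
//             env = args[idx + 1].to_owned();
//         }
//     });
//     (cfg, env)
// }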

114
src/reqenv.rs Normal file
View File

@ -0,0 +1,114 @@
//use std::collections::HashMap;
use std::fmt;
//use std::str::from_utf8;
//use tera::Tera;
use warp::{
http::{method::Method, HeaderMap, HeaderValue},
// Filter,
};
use reqtasks::ReqTasks;
use app_env::{
appenv::AppEnv,
config::Config,
module::Module,
AppStore,
// AppData,
};
use app_auth::{
AuthStore,
UserCtx,
LoginRequest,
// BEARER_PREFIX,
// AuthError,
};
/// `ReqEnv` wraps [`ReqTasks`] as its core type,
/// so it can declare:
/// - auth methods locally
/// - additional attributes
/// - additional request-task methods
///
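/// # Example (sketch)
///
/// A hedged construction sketch; the stores and headers come from the caller
/// and the "kloud" module prefix is only an illustration:
///
/// ```ignore
/// let reqenv = ReqEnv::new(app_store, auth_store, headers, Method::GET,
///     "/config", "config", "kloud");
/// println!("{}", reqenv);
/// ```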
#[derive(Clone)]
pub struct ReqEnv {
pub req: ReqTasks,
}
impl fmt::Display for ReqEnv {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{} {} {}", &self.req.path, &self.req.origin, &self.req.key_module)
}
}
impl ReqEnv {
pub fn new(
app_db: AppStore,
auth_store: AuthStore,
header: HeaderMap<HeaderValue>,
method: Method,
path: &str,
origin: &str,
key_module: &str
) -> Self {
let app_data = app_db.app_data.read();
// let auth_store: &'a AuthStore = &AuthStore {
// users: auth_db.users.clone(),
// sessions: auth_db.sessions.clone(),
// enforcer: auth_db.enforcer.clone(),
// };
Self {
req: ReqTasks {
app_data: app_data.to_owned(),
auth_store: auth_store.to_owned(),
header,
method,
path: format!("{}{}", key_module, path),
origin: format!("{}{}", key_module, origin),
key_module: key_module.to_string(),
},
}
}
/// Get `AppEnv`
#[must_use]
pub fn env(&self) -> AppEnv {
self.req.env()
}
/// Get Tera
#[must_use]
pub fn tera(&self) -> tera::Tera {
self.req.tera()
}
/// Get Context (ctx)
#[must_use]
pub fn ctx(&self) -> tera::Context {
self.req.ctx()
}
/// Get `AppEnv` Config
#[must_use]
pub fn config(&self) -> Config {
self.req.config()
}
#[must_use]
pub fn module(&self) -> Module {
self.req.module()
}
#[must_use]
pub fn lang(&self) -> String {
self.req.lang()
}
#[allow(clippy::missing_errors_doc)]
pub fn token_from_header(&self) -> anyhow::Result<String> {
self.req.token_from_header()
}
#[allow(clippy::missing_errors_doc)]
pub async fn token_session(&self, login: &LoginRequest) -> anyhow::Result<String> {
self.req.token_session(login).await
}
#[allow(clippy::missing_errors_doc)]
pub async fn user_authentication(&self) -> anyhow::Result<UserCtx> {
self.req.user_authentication().await
}
}