diff --git a/Cargo.toml b/Cargo.toml index 45d010f15..284063a86 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -65,8 +65,9 @@ strum = { version = "0.26", features = ["derive"] } # tardis = { version = "0.1.0-rc.16" } # tardis = { path = "../tardis/tardis" } tardis = { git = "https://github.com/ideal-world/tardis.git", rev = "03ef942" } -asteroid-mq = { git = "https://github.com/4t145/asteroid-mq.git", rev = "d59c64d" } -# asteroid-mq = { path = "../asteroid/asteroid-mq" } +# asteroid-mq = { git = "https://github.com/4t145/asteroid-mq.git", rev = "d59c64d" } +asteroid-mq = { git = "https://github.com/4t145/asteroid-mq.git", rev = "83a6643" } +asteroid-mq-sdk = { git = "https://github.com/4t145/asteroid-mq.git", rev = "83a6643" } #spacegate # spacegate-shell = { path = "../spacegate/crates/shell", features = [ diff --git a/backend/basic/src/process/task_processor.rs b/backend/basic/src/process/task_processor.rs index 443955545..14cf8eda0 100644 --- a/backend/basic/src/process/task_processor.rs +++ b/backend/basic/src/process/task_processor.rs @@ -311,13 +311,13 @@ struct TaskExecuteEventReq { } impl EventAttribute for TaskSetStatusEventReq { - const SUBJECT: Subject = Subject::const_new(EVENT_SET_TASK_STATUS_FLAG.as_bytes()); + const SUBJECT: Subject = Subject::const_new(EVENT_SET_TASK_STATUS_FLAG); } impl EventAttribute for TaskSetProcessDataEventReq { - const SUBJECT: Subject = Subject::const_new(EVENT_SET_TASK_PROCESS_DATA_FLAG.as_bytes()); + const SUBJECT: Subject = Subject::const_new(EVENT_SET_TASK_PROCESS_DATA_FLAG); } impl EventAttribute for TaskExecuteEventReq { - const SUBJECT: Subject = Subject::const_new(EVENT_EXECUTE_TASK_FLAG.as_bytes()); + const SUBJECT: Subject = Subject::const_new(EVENT_EXECUTE_TASK_FLAG); } diff --git a/backend/basic/src/spi/spi_initializer.rs b/backend/basic/src/spi/spi_initializer.rs index 89b43d1f6..630c5dd23 100644 --- a/backend/basic/src/spi/spi_initializer.rs +++ b/backend/basic/src/spi/spi_initializer.rs @@ -178,6 +178,13 @@ pub mod common_pg { common::get_isolation_flag_from_ext(ext) } + /// Get the table full name from the extension + /// 根据入参生成对应表全限定名 + pub fn get_table_full_name(ext: &HashMap, table_flag: String, tag: String) -> String { + let schema_name = get_schema_name_from_ext(ext).expect("ignore"); + return format!("{schema_name}.{GLOBAL_STORAGE_FLAG}_{table_flag}_{tag}"); + } + /// Check if the schema exists /// 检查schema是否存在 pub async fn check_schema_exit(client: &TardisRelDBClient, ctx: &TardisContext) -> TardisResult { @@ -279,6 +286,7 @@ pub mod common_pg { table_flag: &str, // Create table DDL table_create_content: &str, + table_inherits: Option, // Table index // Format: field name -> index type indexes: Vec<(&str, &str)>, @@ -295,7 +303,18 @@ pub mod common_pg { } else if !mgr { return Err(TardisError::bad_request("The requested tag does not exist", "")); } - do_init_table(&schema_name, &conn, &tag, table_flag, table_create_content, indexes, primary_keys, update_time_field).await?; + do_init_table( + &schema_name, + &conn, + &tag, + table_flag, + table_create_content, + table_inherits, + indexes, + primary_keys, + update_time_field, + ) + .await?; Ok((conn, format!("{schema_name}.{GLOBAL_STORAGE_FLAG}_{table_flag}{tag}"))) } @@ -322,7 +341,7 @@ pub mod common_pg { ) -> TardisResult<()> { let tag = tag.map(|t| format!("_{t}")).unwrap_or_default(); let schema_name = get_schema_name_from_context(ctx); - do_init_table(&schema_name, conn, &tag, table_flag, table_create_content, indexes, primary_keys, update_time_field).await + 
do_init_table(&schema_name, conn, &tag, table_flag, table_create_content, None, indexes, primary_keys, update_time_field).await } async fn do_init_table( @@ -331,6 +350,7 @@ pub mod common_pg { tag: &str, table_flag: &str, table_create_content: &str, + table_inherits: Option, // field_name_or_fun -> index type indexes: Vec<(&str, &str)>, primary_keys: Option>, @@ -341,7 +361,12 @@ pub mod common_pg { r#"CREATE TABLE {schema_name}.{GLOBAL_STORAGE_FLAG}_{table_flag}{tag} ( {table_create_content} -)"# + ){}"#, + if let Some(inherits) = table_inherits { + format!(" INHERITS ({inherits})") + } else { + "".to_string() + } ), vec![], ) diff --git a/backend/gateways/spacegate-plugins/Cargo.toml b/backend/gateways/spacegate-plugins/Cargo.toml index 18136c3d2..ea0996ab2 100644 --- a/backend/gateways/spacegate-plugins/Cargo.toml +++ b/backend/gateways/spacegate-plugins/Cargo.toml @@ -21,11 +21,12 @@ spacegate-shell = { workspace = true, features = [ "k8s", "ext-redis", "ext-axum", + "plugin-east-west-traffic-white-list", ] } bios-sdk-invoke = { path = "../../../frontend/sdks/invoke", features = [ "spi_log", -] } +], default-features = false } jsonpath-rust = "0.3.1" diff --git a/backend/gateways/spacegate-plugins/src/plugin/audit_log.rs b/backend/gateways/spacegate-plugins/src/plugin/audit_log.rs index c552e94a9..8f3e167b0 100644 --- a/backend/gateways/spacegate-plugins/src/plugin/audit_log.rs +++ b/backend/gateways/spacegate-plugins/src/plugin/audit_log.rs @@ -209,18 +209,21 @@ impl AuditLogPlugin { if !self.log_url.is_empty() && !self.spi_app_id.is_empty() { tokio::task::spawn(async move { match spi_log_client::SpiLogClient::add( - &LogItemAddReq { + LogItemAddReq { tag, - content: TardisFuns::json.obj_to_string(&content).unwrap_or_default(), + content: TardisFuns::json.obj_to_json(&content).unwrap_or_default(), kind: None, ext: Some(content.to_value()), key: None, op: Some(content.op), rel_key: None, - id: None, + idempotent_id: None, ts: Some(tardis::chrono::Utc::now()), owner: content.user_id, own_paths: None, + msg: None, + owner_name: None, + push: false, }, &funs, &spi_ctx, diff --git a/backend/gateways/spacegate-plugins/src/plugin/auth.rs b/backend/gateways/spacegate-plugins/src/plugin/auth.rs index 71ecb731f..d78687634 100644 --- a/backend/gateways/spacegate-plugins/src/plugin/auth.rs +++ b/backend/gateways/spacegate-plugins/src/plugin/auth.rs @@ -8,6 +8,7 @@ use bios_auth::{ serv::{auth_crypto_serv, auth_kernel_serv, auth_res_serv}, }; +use http::header::HOST; use serde::{Deserialize, Serialize}; use spacegate_shell::{ hyper::{ @@ -15,12 +16,17 @@ use spacegate_shell::{ http::{HeaderMap, HeaderName, HeaderValue, StatusCode}, Method, Request, Response, }, - kernel::{extension::Reflect, helper_layers::function::Inner, SgRequest}, + kernel::{ + extension::{IsEastWestTraffic, Reflect}, + helper_layers::function::Inner, + SgRequest, + }, plugin::{Plugin, PluginConfig, PluginError}, BoxError, SgBody, SgRequestExt, }; use std::{ collections::HashMap, + ops::Deref, str::FromStr, sync::{Arc, Once, OnceLock}, }; @@ -28,7 +34,7 @@ use tardis::{ basic::{error::TardisError, result::TardisResult}, cache::AsyncCommands as _, config::config_dto::CacheModuleConfig, - log::{self, warn}, + tracing::{self as tracing, instrument, warn}, serde_json::{self, json}, tokio::{sync::RwLock, task::JoinHandle}, url::Url, @@ -69,7 +75,7 @@ impl SgPluginAuthConfig { let mut instance = INSTANCE.get_or_init(Default::default).write().await; if let Some((md5, handle)) = instance.as_ref() { if config_md5.eq(md5) { - 
log::trace!("[SG.Filter.Auth] have not found config change"); + tracing::trace!("[SG.Filter.Auth] have not found config change"); return Ok(()); } else { handle.abort(); @@ -98,7 +104,7 @@ impl SgPluginAuthConfig { tardis::TardisFuns::hot_reload(tardis_config).await?; let handle = auth_initializer::init_without_webserver().await?; *instance = Some((config_md5, handle)); - log::info!("[SG.Filter.Auth] init done"); + tracing::info!("[SG.Filter.Auth] init done"); Ok(()) } } @@ -136,10 +142,10 @@ pub struct AuthPlugin { /// /// e.g /// - /// |request mix url|replace_url| result | - /// |---------------|-----------|---------------------| - /// | `/apis` | `apis` | `/{true_url}` | - /// |`/prefix/apis` | `apis` |`/prefix/{true_url}` | + /// |request mix url|mix_replace_url| result | + /// |---------------|---------------|---------------------| + /// | `/apis` | `apis` | `/{true_url}` | + /// |`/prefix/apis` | `apis` |`/prefix/{true_url}` | mix_replace_url: String, /// Remove prefix of AuthReq path. /// use for [ctx_to_auth_req] @@ -240,9 +246,9 @@ impl AuthPlugin { return Ok(req); } - log::trace!("[SG.Filter.Auth] request filter info: request path is {}", req.uri().path()); + tracing::trace!("[SG.Filter.Auth] request filter info: request url is {}", req.uri()); if method == http::Method::GET && req.uri().path().trim_matches('/') == self.fetch_server_config_path.as_str().trim_matches('/') { - log::debug!("[SG.Filter.Auth] request path hit fetch server config path: {}", self.fetch_server_config_path); + tracing::debug!("[SG.Filter.Auth] request path hit fetch server config path: {}", self.fetch_server_config_path); let mock_resp = Response::builder() .header(http::header::CONTENT_TYPE, HeaderValue::from_static("application/json")) .status(http::StatusCode::OK) @@ -260,9 +266,10 @@ impl AuthPlugin { let is_true_mix_req = self.is_mix_req(req.headers()); - if self.auth_config.strict_security_mode && !is_true_mix_req { - log::debug!("[SG.Filter.Auth] handle mix request"); - return Ok(handle_mix_req(&self.auth_config, &self.mix_replace_url, req).await.map_err(PluginError::internal_error::)?); + let is_east_west_traffic = req.extensions().get::().map(Deref::deref).unwrap_or(&false); + if self.auth_config.strict_security_mode && !is_true_mix_req && !is_east_west_traffic { + tracing::debug!("[SG.Filter.Auth] handle mix request"); + return Ok(handle_mix_req(&self, req).await.map_err(PluginError::internal_error::)?); } req.headers_mut().append(&self.header_is_mix_req, HeaderValue::from_static("false")); @@ -270,31 +277,31 @@ impl AuthPlugin { match auth_kernel_serv::auth(&mut auth_req, is_true_mix_req).await { Ok(auth_result) => { - if log::level_enabled!(log::Level::TRACE) { - log::trace!("[SG.Filter.Auth] auth return ok {:?}", auth_result); - } else if log::level_enabled!(log::Level::DEBUG) { + if tracing::level_enabled!(tracing::Level::TRACE) { + tracing::trace!("[SG.Filter.Auth] auth return ok {:?}", auth_result); + } else if tracing::level_enabled!(tracing::Level::DEBUG) { if let Some(ctx) = &auth_result.ctx { - log::debug!("[SG.Filter.Auth] auth return ok ctx:{ctx}",); + tracing::debug!("[SG.Filter.Auth] auth return ok ctx:{ctx}",); } else { - log::debug!("[SG.Filter.Auth] auth return ok ctx:None",); + tracing::debug!("[SG.Filter.Auth] auth return ok ctx:None",); }; } if auth_result.e.is_none() { req = success_auth_result_to_req(auth_result, &self.auth_config, req).map_err(PluginError::internal_error::)?; } else if let Some(e) = auth_result.e { - log::info!("[SG.Filter.Auth] auth failed:{e}"); + 
tracing::info!("[SG.Filter.Auth] auth failed:{e}"); let err_resp = Response::builder() .header(http::header::CONTENT_TYPE, HeaderValue::from_static("application/json")) .status(StatusCode::from_str(&e.code).unwrap_or(StatusCode::BAD_GATEWAY)) - .body(SgBody::full(json!({"code":format!("{}-gateways-cert-error",e.code),"message":e.message}).to_string())) + .body(SgBody::full(json!({"code":format!("{}-gateway-cert-error",e.code),"message":e.message}).to_string())) .map_err(PluginError::internal_error::)?; return Err(err_resp); }; Ok(req) } Err(e) => { - log::info!("[SG.Filter.Auth] auth return error {:?}", e); + tracing::info!("[SG.Filter.Auth] auth return error {:?}", e); let err_resp = Response::builder() .header(http::header::CONTENT_TYPE, HeaderValue::from_static("application/json")) .status(StatusCode::from_str(&e.code).unwrap_or(StatusCode::BAD_GATEWAY)) @@ -341,35 +348,57 @@ impl AuthPlugin { } } -async fn handle_mix_req(auth_config: &AuthConfig, mix_replace_url: &str, req: SgRequest) -> Result { +#[instrument(name="[SG.Filter.Auth.MixReq]",level = "trace", skip_all, fields(req_uri=req.uri().to_string()))] +async fn handle_mix_req(plugin_config: &AuthPlugin, req: SgRequest) -> Result { + let auth_config = &plugin_config.auth_config; let (mut parts, mut body) = req.into_parts(); if !body.is_dumped() { body = body.dump().await?; } let string_body = String::from_utf8_lossy(body.get_dumped().expect("not expect code")).trim_matches('"').to_string(); if string_body.is_empty() { - return Err("[SG.Filter.Auth.MixReq] body can't be empty".into()); + return Err(" body can't be empty".into()); } let mut req_headers = parts.headers.iter().map(|(k, v)| (k.as_str().to_string(), v.to_str().expect("error parse header value to str").to_string())).collect(); let (body, crypto_headers) = auth_crypto_serv::decrypt_req(&req_headers, &Some(string_body), true, true, auth_config).await?; req_headers.remove(&auth_config.head_key_crypto); req_headers.remove(&auth_config.head_key_crypto.to_ascii_lowercase()); - let body = body.ok_or_else(|| TardisError::custom("500", "[SG.Filter.Auth.MixReq] decrypt body can't be empty", "500-parse_mix_req-parse-error"))?; + let body = body.ok_or_else(|| TardisError::custom("500", " decrypt body can't be empty", "500-parse_mix_req-parse-error"))?; let mix_body = TardisFuns::json.str_to_obj::(&body)?; - // ctx.set_action(SgRouteFilterRequestAction::Redirect); - let mut true_uri = Url::from_str(&parts.uri.to_string().replace(mix_replace_url, &mix_body.uri)) - .map_err(|e| TardisError::custom("500", &format!("[SG.Filter.Auth.MixReq] url parse err {e}"), "500-parse_mix_req-url-error"))?; - true_uri.set_path(&true_uri.path().replace("//", "/")); - true_uri.set_query(Some(&if let Some(old_query) = true_uri.query() { - format!("{}&_t={}", old_query, mix_body.ts) - } else { - format!("_t={}", mix_body.ts) - })); - parts.uri = true_uri.as_str().parse().map_err(|e| TardisError::custom("500", &format!("[SG.Filter.Auth.MixReq] uri parse error: {}", e), ""))?; + let true_uri = parts.uri.to_string().replace(&plugin_config.mix_replace_url, &mix_body.uri).replace("//", "/"); + tracing::trace!(?true_uri," string true uri:"); + let mut true_uri_parts = + true_uri.parse::().map_err(|e| TardisError::custom("500", &format!(" url parse err {e}"), "500-parse_mix_req-url-error"))?.into_parts(); + + let host = parts.uri.host().map(String::from).or(parts.headers.get(HOST).and_then(|x| x.to_str().map(String::from).ok())); + if let Some(host) = host { + true_uri_parts.authority = 
Some(http::uri::Authority::from_str(&host).map_err(|e| { + TardisError::custom( + "500", + &format!(" error parse str {host} to authority :{e}"), + "500-parse_mix_req-authority-error", + ) + })?); + } + let old_scheme = parts.uri.scheme().cloned().unwrap_or_else(|| { + if let Some(port) = true_uri_parts.authority.clone().and_then(|a| a.port_u16()) { + if port == 443 { + http::uri::Scheme::HTTPS + } else { + http::uri::Scheme::HTTP + } + } else { + http::uri::Scheme::HTTP + } + }); + true_uri_parts.scheme = Some(old_scheme); + let true_uri = http::Uri::from_parts(true_uri_parts)?; + tracing::trace!(" raw url:[{}],true url:[{}]", parts.uri.to_string(), true_uri); + parts.uri = true_uri; parts.method = Method::from_str(&mix_body.method.to_ascii_uppercase()) - .map_err(|e| TardisError::custom("500", &format!("[SG.Filter.Auth.MixReq] method parse err {e}"), "500-parse_mix_req-method-error"))?; + .map_err(|e| TardisError::custom("500", &format!(" method parse err {e}"), "500-parse_mix_req-method-error"))?; let mut headers = req_headers; headers.extend(mix_body.headers); @@ -380,17 +409,20 @@ async fn handle_mix_req(auth_config: &AuthConfig, mix_replace_url: &str, req: Sg .into_iter() .map(|(k, v)| { Ok::<_, TardisError>(( - HeaderName::from_str(&k).map_err(|e| TardisError::format_error(&format!("[SG.Filter.Auth] error parse str {k} to header name :{e}"), ""))?, - HeaderValue::from_str(&v).map_err(|e| TardisError::format_error(&format!("[SG.Filter.Auth] error parse str {v} to header value :{e}"), ""))?, + HeaderName::from_str(&k).map_err(|e| TardisError::format_error(&format!(" error parse str {k} to header name :{e}"), ""))?, + HeaderValue::from_str(&v).map_err(|e| TardisError::format_error(&format!(" error parse str {v} to header value :{e}"), ""))?, )) }) .collect::>>()?, ); + parts.headers.remove(plugin_config.header_is_same_req.clone()); + parts.headers.append(plugin_config.header_is_mix_req.clone(), HeaderValue::from_static("true")); let new_body = SgBody::full(mix_body.body); - // ctx.request.set_header_str(&self.header_is_mix_req, "true")?; - Ok(Request::from_parts(parts, new_body)) + let mut new_req = Request::from_parts(parts, new_body); + spacegate_shell::kernel::utils::req_length_or_chunked(&mut new_req); + Ok(new_req) } /// # Convert Request to AuthReq @@ -496,7 +528,7 @@ async fn resp_to_auth_encrypt_req(resp: Response) -> TardisResult<(AuthE parts.extensions.insert(BeforeEncryptBody::new(resp_body.clone())); } let string_body = String::from_utf8_lossy(resp_body); - log::trace!("[SG.Filter.Auth] Before Encrypt Body {}", string_body); + tracing::trace!("[SG.Filter.Auth] Before Encrypt Body {}", string_body); Ok(( AuthEncryptReq { headers, diff --git a/backend/gateways/spacegate-plugins/src/plugin/auth/tests.rs b/backend/gateways/spacegate-plugins/src/plugin/auth/tests.rs index 22a72b415..cd255bf3c 100644 --- a/backend/gateways/spacegate-plugins/src/plugin/auth/tests.rs +++ b/backend/gateways/spacegate-plugins/src/plugin/auth/tests.rs @@ -68,7 +68,7 @@ async fn test() { // let req_body = before_filter_ctx.response.take_body_into_bytes().await.unwrap(); // let req_body = String::from_utf8_lossy(&req_body).to_string(); // assert!(!req_body.is_empty()); -// assert_eq!(req_body, "{\"code\":\"401-gateways-cert-error\",\"message\":\"[Auth] Token [aaa] is not legal\"}"); +// assert_eq!(req_body, "{\"code\":\"401-gateway-cert-error\",\"message\":\"[Auth] Token [aaa] is not legal\"}"); // cache_client.set(&format!("{}tokenxxx", filter_auth.auth_config.cache_key_token_info), 
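A minimal, self-contained sketch of the URI rebuilding that the reworked handle_mix_req above performs: the authority comes from the original URI or the Host header, and when no scheme is known it falls back to HTTPS only for an explicit :443 port, otherwise HTTP. It uses only the http crate; the function and parameter names are illustrative, not part of the plugin.

use http::uri::{Authority, Scheme, Uri};

/// Rebuild an absolute URI from a (possibly path-only) target and an optional host value.
fn rebuild_uri(target: &str, host: Option<&str>) -> Result<Uri, http::Error> {
    let mut parts = target.parse::<Uri>()?.into_parts();
    if let Some(host) = host {
        let authority: Authority = host.parse()?;
        // Fallback scheme: HTTPS for an explicit :443 port, otherwise HTTP.
        let scheme = if authority.port_u16() == Some(443) { Scheme::HTTPS } else { Scheme::HTTP };
        parts.authority = Some(authority);
        parts.scheme = Some(scheme);
    }
    Ok(Uri::from_parts(parts)?)
}

// e.g. rebuild_uri("/prefix/apis?x=1", Some("example.com:443"))
// yields https://example.com:443/prefix/apis?x=1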
"default,accountxxx").await.unwrap(); // cache_client diff --git a/backend/middlewares/event/Cargo.toml b/backend/middlewares/event/Cargo.toml index 8270a00e2..6262d9187 100644 --- a/backend/middlewares/event/Cargo.toml +++ b/backend/middlewares/event/Cargo.toml @@ -29,8 +29,8 @@ bios-sdk-invoke = { path = "../../../frontend/sdks/invoke", features = [ "spi_log", "event", ], default-features = false } -asteroid-mq = { workspace = true, features = ["cluster-k8s", "json"] } - +asteroid-mq = { workspace = true, features = ["cluster-k8s"] } +pin-project-lite = { version = "0.2" } [dev-dependencies] tardis = { workspace = true, features = [ @@ -42,3 +42,4 @@ tardis = { workspace = true, features = [ ] } bios-basic = { path = "../../basic", features = ["default", "test"] } tokio = { version = "1", features = ["full"] } +asteroid-mq-sdk = { workspace = true } \ No newline at end of file diff --git a/backend/middlewares/event/src/api.rs b/backend/middlewares/event/src/api.rs index 316071fdc..d39b383e3 100644 --- a/backend/middlewares/event/src/api.rs +++ b/backend/middlewares/event/src/api.rs @@ -1,3 +1,2 @@ -pub mod event_listener_api; -pub mod event_proc_api; -pub mod event_topic_api; +pub mod ca; +pub mod ci; diff --git a/backend/middlewares/event/src/api/ca.rs b/backend/middlewares/event/src/api/ca.rs new file mode 100644 index 000000000..6d72ec04f --- /dev/null +++ b/backend/middlewares/event/src/api/ca.rs @@ -0,0 +1,2 @@ +pub mod event_connect_api; +pub mod event_register_api; diff --git a/backend/middlewares/event/src/api/ca/event_connect_api.rs b/backend/middlewares/event/src/api/ca/event_connect_api.rs new file mode 100644 index 000000000..6b3b05937 --- /dev/null +++ b/backend/middlewares/event/src/api/ca/event_connect_api.rs @@ -0,0 +1,62 @@ +use asteroid_mq::prelude::{Node, NodeId}; +use asteroid_mq::protocol::node::edge::codec::CodecKind; +use asteroid_mq::protocol::node::edge::packet::Auth; +use asteroid_mq::protocol::node::edge::EdgeConfig; +use tardis::web::poem::web::websocket::{BoxWebSocketUpgraded, WebSocket}; +use tardis::web::poem_openapi; +use tardis::web::poem_openapi::param::Query; +use tardis::web::reqwest::StatusCode; +use tardis::{log as tracing, TardisFuns}; + +use crate::serv::event_connect_serv::PoemWs; +use crate::serv::event_register_serv::EventRegisterServ; + +#[derive(Clone, Default, Debug)] +pub struct EventConnectApi { + register_serv: EventRegisterServ, +} + +/// Event Connect API +/// +/// 事件处理API +#[poem_openapi::OpenApi(prefix_path = "/ca/connect")] +impl EventConnectApi { + /// Connect client nodes + /// + /// 连接客户端节点 + #[oai(path = "/", method = "get")] + async fn ws_process(&self, node_id: Query, websocket: WebSocket) -> Result { + let peer_id = NodeId::from_base64(&node_id).map_err(|e| tardis::web::poem::Error::from_string(e.to_string(), StatusCode::BAD_REQUEST))?; + let _ctx = self.register_serv.get_ctx(peer_id).await.map_err(|e| tardis::web::poem::Error::from_string(e.to_string(), StatusCode::UNAUTHORIZED))?; + let config = EdgeConfig { + peer_id, + supported_codec_kinds: vec![CodecKind::JSON].into_iter().collect(), + peer_auth: Auth {}, + }; + let Some(node) = TardisFuns::store().get_singleton::() else { + return Err(tardis::web::poem::Error::from_string( + "mq server node have not initialized", + tardis::web::poem::http::StatusCode::INTERNAL_SERVER_ERROR, + )); + }; + let register_serv = self.register_serv.clone(); + let upgraded: BoxWebSocketUpgraded = websocket.on_upgrade(Box::new(|stream| { + Box::pin(async move { + let ws = PoemWs::new(stream); + 
let Ok(node_id) = node.create_edge_connection(ws, config).await.inspect_err(|e| { + tracing::error!(?e, "failed to create edge connection"); + }) else { + return; + }; + tracing::info!(?node_id, "edge connected"); + let Some(connection) = node.get_edge_connection(node_id) else { + return; + }; + let _ = connection.finish_signal.recv_async().await; + let _ = register_serv.unregister_ctx(node_id).await; + tracing::info!(?node_id, "edge disconnected"); + }) + })); + Ok(upgraded) + } +} diff --git a/backend/middlewares/event/src/api/ca/event_register_api.rs b/backend/middlewares/event/src/api/ca/event_register_api.rs new file mode 100644 index 000000000..d531711da --- /dev/null +++ b/backend/middlewares/event/src/api/ca/event_register_api.rs @@ -0,0 +1,27 @@ +use tardis::web::context_extractor::TardisContextExtractor; +use tardis::web::poem_openapi; +use tardis::web::web_resp::{TardisApiResult, TardisResp}; + +use crate::dto::event_dto::EventRegisterResp; + +use crate::serv::event_register_serv; +#[derive(Clone, Default, Debug)] +pub struct EventRegisterApi { + register_serv: event_register_serv::EventRegisterServ, +} + +/// Event Node Register API +/// +/// 事件注册节点API +#[poem_openapi::OpenApi(prefix_path = "/ca/register")] +impl EventRegisterApi { + /// Register event node + /// + /// 注册事件监听器 + #[oai(path = "/", method = "put")] + async fn register(&self, ctx: TardisContextExtractor) -> TardisApiResult { + let node_id = self.register_serv.register_ctx(&ctx.0).await?; + let resp = EventRegisterResp { node_id: node_id.to_base64() }; + TardisResp::ok(resp) + } +} diff --git a/backend/middlewares/event/src/api/ci.rs b/backend/middlewares/event/src/api/ci.rs new file mode 100644 index 000000000..1d0ecb600 --- /dev/null +++ b/backend/middlewares/event/src/api/ci.rs @@ -0,0 +1 @@ +pub mod event_topic_api; diff --git a/backend/middlewares/event/src/api/event_topic_api.rs b/backend/middlewares/event/src/api/ci/event_topic_api.rs similarity index 71% rename from backend/middlewares/event/src/api/event_topic_api.rs rename to backend/middlewares/event/src/api/ci/event_topic_api.rs index f545b351e..d28e1774e 100644 --- a/backend/middlewares/event/src/api/event_topic_api.rs +++ b/backend/middlewares/event/src/api/ci/event_topic_api.rs @@ -6,16 +6,16 @@ use tardis::web::poem_openapi::param::{Path, Query}; use tardis::web::poem_openapi::payload::Json; use tardis::web::web_resp::{TardisApiResult, TardisPage, TardisResp, Void}; -use crate::dto::event_dto::{EventTopicAddOrModifyReq, EventTopicFilterReq, EventTopicInfoResp}; +use crate::dto::event_dto::{EventTopicAddOrModifyReq, EventTopicFilterReq, EventTopicInfoResp, SetTopicAuth}; use crate::event_constants::get_tardis_inst; -use crate::serv::event_topic_serv::EventDefServ; +use crate::serv::event_topic_serv::EventTopicServ; #[derive(Clone)] pub struct EventTopicApi; /// Event Topic API /// /// 事件主题API -#[poem_openapi::OpenApi(prefix_path = "/topic")] +#[poem_openapi::OpenApi(prefix_path = "/ci/topic")] impl EventTopicApi { /// Add Event Definition /// @@ -23,7 +23,7 @@ impl EventTopicApi { #[oai(path = "/", method = "post")] async fn add(&self, mut add_or_modify_req: Json, ctx: TardisContextExtractor) -> TardisApiResult { let funs = get_tardis_inst(); - let id = EventDefServ::add_item(&mut add_or_modify_req.0, &funs, &ctx.0).await?; + let id = EventTopicServ::add_item(&mut add_or_modify_req.0, &funs, &ctx.0).await?; TardisResp::ok(id) } @@ -33,7 +33,7 @@ impl EventTopicApi { #[oai(path = "/:id", method = "put")] async fn modify(&self, id: Path, mut 
add_or_modify_req: Json, ctx: TardisContextExtractor) -> TardisApiResult { let funs = get_tardis_inst(); - EventDefServ::modify_item(&id.0, &mut add_or_modify_req.0, &funs, &ctx.0).await?; + EventTopicServ::modify_item(&id.0, &mut add_or_modify_req.0, &funs, &ctx.0).await?; TardisResp::ok(Void {}) } @@ -43,7 +43,7 @@ impl EventTopicApi { #[oai(path = "/:id", method = "delete")] async fn delete(&self, id: Path, ctx: TardisContextExtractor) -> TardisApiResult { let funs = get_tardis_inst(); - EventDefServ::delete_item(&id.0, &funs, &ctx.0).await?; + EventTopicServ::delete_item(&id.0, &funs, &ctx.0).await?; TardisResp::ok(Void {}) } @@ -63,7 +63,7 @@ impl EventTopicApi { ctx: TardisContextExtractor, ) -> TardisApiResult> { let funs = get_tardis_inst(); - let result = EventDefServ::paginate_items( + let result = EventTopicServ::paginate_items( &EventTopicFilterReq { basic: RbumBasicFilterReq { ids: id.0.map(|id| vec![id]), @@ -82,4 +82,23 @@ impl EventTopicApi { .await?; TardisResp::ok(result) } + + /// Register user to topic + /// + /// 注册用户到主题 + #[oai(path = "/:topic_code/register", method = "put")] + async fn register(&self, topic_code: Path, read: Query, write: Query, ctx: TardisContextExtractor) -> TardisApiResult { + let funs = get_tardis_inst(); + EventTopicServ::register_user( + SetTopicAuth { + topic: topic_code.0, + read: read.0, + write: write.0, + }, + &funs, + &ctx.0, + ) + .await?; + TardisResp::ok(Void {}) + } } diff --git a/backend/middlewares/event/src/api/event_listener_api.rs b/backend/middlewares/event/src/api/event_listener_api.rs deleted file mode 100644 index 49e4dd408..000000000 --- a/backend/middlewares/event/src/api/event_listener_api.rs +++ /dev/null @@ -1,33 +0,0 @@ -use tardis::basic::error::TardisError; -use tardis::web::poem_openapi; -use tardis::web::poem_openapi::param::{Path, Query}; -use tardis::web::poem_openapi::payload::Json; -use tardis::web::web_resp::{TardisApiResult, TardisResp, Void}; - -use crate::dto::event_dto::{EventListenerRegisterReq, EventListenerRegisterResp}; -use crate::event_constants::get_tardis_inst; -use crate::serv::event_listener_serv; -#[derive(Clone)] -pub struct EventListenerApi; - -/// Event Listener API -/// -/// 事件监听器API -#[poem_openapi::OpenApi(prefix_path = "/listener")] -impl EventListenerApi { - /// Register event listener - /// - /// 注册事件监听器 - #[oai(path = "/", method = "post")] - async fn register(&self, listener: Json) -> TardisApiResult { - TardisResp::err(TardisError::not_implemented("unimplemented", "unimplemented")) - } - - /// Remove event listener - /// - /// 移除事件监听器 - #[oai(path = "/:listener_code", method = "delete")] - async fn remove(&self, listener_code: Path, token: Query) -> TardisApiResult { - TardisResp::err(TardisError::not_implemented("unimplemented", "unimplemented")) - } -} diff --git a/backend/middlewares/event/src/api/event_proc_api.rs b/backend/middlewares/event/src/api/event_proc_api.rs deleted file mode 100644 index f254c952a..000000000 --- a/backend/middlewares/event/src/api/event_proc_api.rs +++ /dev/null @@ -1,20 +0,0 @@ -use tardis::web::poem::web::websocket::{BoxWebSocketUpgraded, WebSocket}; -use tardis::web::poem_openapi; -use tardis::web::poem_openapi::param::{Path, Query}; - -#[derive(Clone)] -pub struct EventProcApi; - -/// Event Process API -/// -/// 事件处理API -#[poem_openapi::OpenApi(prefix_path = "/proc")] -impl EventProcApi { - /// Process event - /// - /// 处理事件 - #[oai(path = "/:listener_code", method = "get")] - async fn ws_process(&self, listener_code: Path, token: Query, websocket: 
WebSocket) -> Result { - Err(tardis::web::poem::Error::from_status(tardis::web::poem::http::StatusCode::NOT_IMPLEMENTED)) - } -} diff --git a/backend/middlewares/event/src/domain.rs b/backend/middlewares/event/src/domain.rs index a517ccd93..58f80add1 100644 --- a/backend/middlewares/event/src/domain.rs +++ b/backend/middlewares/event/src/domain.rs @@ -1,2 +1,4 @@ +pub mod event_auth; +pub mod event_message; pub mod event_persistent; pub mod event_topic; diff --git a/backend/middlewares/event/src/domain/event_auth.rs b/backend/middlewares/event/src/domain/event_auth.rs new file mode 100644 index 000000000..de92a9e5a --- /dev/null +++ b/backend/middlewares/event/src/domain/event_auth.rs @@ -0,0 +1,40 @@ +use tardis::db::sea_orm::prelude::*; + +use tardis::db::sea_orm; + +use tardis::{TardisCreateEntity, TardisEmptyBehavior, TardisEmptyRelation}; + +use crate::dto::event_dto::TopicAuth; +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, TardisCreateEntity, TardisEmptyBehavior, TardisEmptyRelation)] +#[sea_orm(table_name = "mq_auth")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: String, + #[sea_orm(indexed)] + pub topic: String, + #[sea_orm(indexed)] + pub ak: String, + pub read: bool, + pub write: bool, +} + +impl Model { + pub fn from_topic_auth(auth: TopicAuth) -> Self { + let id = format!("{}/{}", auth.topic, auth.ak); + Model { + id, + topic: auth.topic, + ak: auth.ak, + read: auth.read, + write: auth.write, + } + } + pub fn into_topic_auth(self) -> TopicAuth { + TopicAuth { + topic: self.topic, + ak: self.ak, + read: self.read, + write: self.write, + } + } +} diff --git a/backend/middlewares/event/src/domain/event_message.rs b/backend/middlewares/event/src/domain/event_message.rs new file mode 100644 index 000000000..9110d64c6 --- /dev/null +++ b/backend/middlewares/event/src/domain/event_message.rs @@ -0,0 +1,116 @@ +use std::collections::HashMap; +use tardis::basic::error::TardisError; +use tardis::chrono::{DateTime, Utc}; +use tardis::db::sea_orm::prelude::*; +use tardis::{basic::result::TardisResult, db::sea_orm}; + +use asteroid_mq::prelude::{ + DurableMessage, EndpointAddr, MaybeBase64Bytes, Message, MessageAckExpectKind, MessageDurableConfig, MessageHeader, MessageId, MessageStatusKind, MessageTargetKind, Subject, + TopicCode, +}; +use tardis::{TardisCreateEntity, TardisEmptyBehavior, TardisEmptyRelation}; +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, TardisCreateEntity, TardisEmptyBehavior, TardisEmptyRelation)] +#[sea_orm(table_name = "mq_message")] +pub struct Model { + #[sea_orm(primary_key)] + pub message_id: String, + #[sea_orm(indexed)] + pub topic: String, + #[sea_orm(indexed)] + pub archived: bool, + pub ack_kind: i16, + pub target_kind: i16, + #[sea_orm(column_type = "DateTime")] + pub expire_time: Option>, + pub max_receiver: Option, + pub subjects: Vec, + pub payload: Vec, + pub status: Vec, + #[sea_orm(column_type = "DateTime")] + pub time: DateTime, +} + +const EP_ADDR_SIZE: usize = size_of::(); +const STATUS_SIZE: usize = 1; +const ENTRY_SIZE: usize = EP_ADDR_SIZE + STATUS_SIZE; +impl Model { + pub fn status_update(&mut self, mut status: HashMap) { + for entry in self.status.chunks_mut(ENTRY_SIZE) { + if entry.len() != ENTRY_SIZE { + break; + } + let (addr, kind) = entry.split_at_mut(EP_ADDR_SIZE); + let mut addr_bytes = [0u8; EP_ADDR_SIZE]; + addr_bytes.copy_from_slice(addr); + + let addr = EndpointAddr::from(addr_bytes); + if let Some(update_kind) = status.remove(&addr) { + kind[0] = update_kind as u8; + } + } + for (addr, kind) in status { + 
self.status.extend(addr.bytes); + self.status.push(kind as u8); + } + } + pub fn status_to_binary(status: HashMap) -> Vec { + let mut status_vec = Vec::new(); + for (addr, kind) in status { + status_vec.extend(addr.bytes); + status_vec.push(kind as u8); + } + status_vec + } + pub fn status_from_binary(status: Vec) -> HashMap { + let mut status_map = HashMap::new(); + for entry in status.chunks(ENTRY_SIZE) { + if entry.len() != ENTRY_SIZE { + break; + } + let (addr, kind) = entry.split_at(EP_ADDR_SIZE); + let mut addr_bytes = [0u8; EP_ADDR_SIZE]; + addr_bytes.copy_from_slice(addr); + let addr = EndpointAddr::from(addr_bytes); + let Some(kind) = MessageStatusKind::try_from_u8(kind[0]) else { + continue; + }; + status_map.insert(addr, kind); + } + status_map + } + pub fn from_durable_message(topic: TopicCode, durable_message: DurableMessage) -> Self { + Model { + topic: topic.to_string(), + message_id: durable_message.message.header.message_id.to_base64(), + ack_kind: durable_message.message.header.ack_kind as u8 as i16, + target_kind: durable_message.message.header.target_kind as u8 as i16, + expire_time: durable_message.message.header.durability.as_ref().map(|d| d.expire), + max_receiver: durable_message.message.header.durability.and_then(|d| d.max_receiver.map(|r| r as i32)), + subjects: durable_message.message.header.subjects.iter().map(ToString::to_string).collect(), + payload: durable_message.message.payload.0.to_vec(), + status: Self::status_to_binary(durable_message.status), + time: durable_message.time, + archived: false, + } + } + pub fn try_into_durable_message(self) -> TardisResult { + let message = DurableMessage { + message: Message { + header: MessageHeader { + message_id: MessageId::from_base64(&self.message_id).map_err(|e| TardisError::internal_error(&e.to_string(), "base-64-decode"))?, + ack_kind: MessageAckExpectKind::try_from_u8(self.ack_kind as u8).expect("valid ack kind"), + target_kind: MessageTargetKind::from(self.target_kind as u8), + durability: self.expire_time.map(|expire| MessageDurableConfig { + expire, + max_receiver: self.max_receiver.map(|r| r as u32), + }), + subjects: self.subjects.into_iter().map(Subject::new).collect(), + }, + payload: MaybeBase64Bytes::new(self.payload.into()), + }, + status: Self::status_from_binary(self.status), + time: self.time, + }; + Ok(message) + } +} diff --git a/backend/middlewares/event/src/domain/event_topic.rs b/backend/middlewares/event/src/domain/event_topic.rs index c9bba7c76..e375d835e 100644 --- a/backend/middlewares/event/src/domain/event_topic.rs +++ b/backend/middlewares/event/src/domain/event_topic.rs @@ -14,6 +14,7 @@ pub struct Model { pub topic_code: String, pub overflow_policy: String, pub overflow_size: i32, + pub check_auth: bool, #[fill_ctx] pub own_paths: String, } diff --git a/backend/middlewares/event/src/dto/event_dto.rs b/backend/middlewares/event/src/dto/event_dto.rs index dcfaeeabe..6b4d7be49 100644 --- a/backend/middlewares/event/src/dto/event_dto.rs +++ b/backend/middlewares/event/src/dto/event_dto.rs @@ -17,18 +17,22 @@ pub struct EventTopicConfig { pub topic_code: String, pub overflow_policy: String, pub overflow_size: i32, + pub check_auth: bool, } #[derive(poem_openapi::Object, Serialize, Deserialize, Debug, Clone, FromQueryResult)] - pub struct EventTopicAddOrModifyReq { pub code: String, pub name: String, + #[oai(default)] pub blocking: bool, pub topic_code: String, pub overflow_policy: String, pub overflow_size: i32, + #[oai(default)] + pub check_auth: bool, } + impl EventTopicAddOrModifyReq { 
pub fn into_topic_config(self) -> TopicConfig { TopicConfig { @@ -44,6 +48,20 @@ impl EventTopicAddOrModifyReq { }), } } + pub fn from_config(config: TopicConfig) -> Self { + Self { + code: config.code.to_string(), + name: config.code.to_string(), + blocking: config.blocking, + topic_code: config.code.to_string(), + overflow_policy: config.overflow_config.as_ref().map_or("".to_string(), |c| match c.policy { + TopicOverflowPolicy::RejectNew => "RejectNew".to_string(), + TopicOverflowPolicy::DropOld => "DropOld".to_string(), + }), + overflow_size: config.overflow_config.as_ref().map_or(0, |c| c.size.get() as i32), + check_auth: false, + } + } } #[derive(poem_openapi::Object, Serialize, Deserialize, Debug, Clone, FromQueryResult)] @@ -55,6 +73,7 @@ pub struct EventTopicInfoResp { pub topic_code: String, pub overflow_policy: String, pub overflow_size: i32, + pub check_auth: bool, } impl EventTopicInfoResp { @@ -105,9 +124,8 @@ pub struct EventListenerRegisterReq { pub subscribe_mode: bool, } #[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] -pub struct EventListenerRegisterResp { - pub ws_addr: String, - pub listener_code: String, +pub struct EventRegisterResp { + pub node_id: String, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -126,3 +144,18 @@ pub struct EventMessageMgrWrap { pub ori_from_avatar: String, pub ori_to_avatars: Option>, } + +#[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] +pub struct TopicAuth { + pub topic: String, + pub ak: String, + pub read: bool, + pub write: bool, +} + +#[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] +pub struct SetTopicAuth { + pub topic: String, + pub read: bool, + pub write: bool, +} diff --git a/backend/middlewares/event/src/event_config.rs b/backend/middlewares/event/src/event_config.rs index 167d589ab..b69aa4199 100644 --- a/backend/middlewares/event/src/event_config.rs +++ b/backend/middlewares/event/src/event_config.rs @@ -1,20 +1,30 @@ +use asteroid_mq::openraft; use bios_basic::rbum::rbum_config::RbumConfig; -use lazy_static::lazy_static; + use serde::{Deserialize, Serialize}; use std::{fmt::Debug, sync::Mutex}; -use tardis::basic::{error::TardisError, result::TardisResult}; +use tardis::{ + basic::{error::TardisError, result::TardisResult}, + tardis_static, +}; + #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(default)] pub struct EventConfig { pub rbum: RbumConfig, pub enable: bool, pub svc: String, - pub event_url: String, - pub base_url: String, - pub event_bus_sk: String, + pub raft: openraft::Config, + // default by 5000ms + pub startup_timeout: u64, + pub durable: bool, pub avatars: Vec, - pub resend_threshold: u32, - pub resend_interval_sec: Option, + pub cluster: Option, +} + +impl EventConfig { + pub const CLUSTER_K8S: &str = "k8s"; + pub const NO_CLUSTER: &str = "singleton"; } impl Default for EventConfig { @@ -22,23 +32,18 @@ impl Default for EventConfig { EventConfig { rbum: Default::default(), enable: false, - base_url: "http://localhost:8080/event".to_string(), svc: "bios".to_string(), avatars: Vec::new(), - event_url: "".to_string(), - event_bus_sk: "".to_string(), - resend_threshold: 3, - resend_interval_sec: None, - } - } -} - -impl EventConfig { - pub fn event_url(&self) -> String { - if self.event_url.ends_with('/') { - self.event_url.clone() - } else { - format!("{}/", self.event_url) + startup_timeout: 5000, + durable: true, + cluster: Some(Self::CLUSTER_K8S.to_string()), + raft: openraft::Config { + cluster_name: "bios".to_string(), + election_timeout_max: 1000, + 
election_timeout_min: 500, + heartbeat_interval: 100, + ..Default::default() + }, } } } @@ -49,16 +54,16 @@ pub struct EventInfo { pub domain_id: String, } -lazy_static! { - static ref EVENT_INFO: Mutex> = Mutex::new(None); +tardis_static! { + pub event_info: Mutex>; } pub struct EventInfoManager; impl EventInfoManager { - pub fn set(event_info: EventInfo) -> TardisResult<()> { - let mut conf = EVENT_INFO.lock().map_err(|e| TardisError::internal_error(&format!("{e:?}"), ""))?; - *conf = Some(event_info); + pub fn set(new_event_info: EventInfo) -> TardisResult<()> { + let mut conf = event_info().lock().map_err(|e| TardisError::internal_error(&format!("{e:?}"), ""))?; + *conf = Some(new_event_info); Ok(()) } @@ -66,7 +71,7 @@ impl EventInfoManager { where F: Fn(&EventInfo) -> T, { - let conf = EVENT_INFO.lock().unwrap_or_else(|e| panic!("event info lock error: {e:?}")); + let conf = event_info().lock().unwrap_or_else(|e| panic!("event info lock error: {e:?}")); let conf = conf.as_ref().unwrap_or_else(|| panic!("rbum config not set")); fun(conf) } diff --git a/backend/middlewares/event/src/event_initializer.rs b/backend/middlewares/event/src/event_initializer.rs index e5183f74c..47d7d2c85 100644 --- a/backend/middlewares/event/src/event_initializer.rs +++ b/backend/middlewares/event/src/event_initializer.rs @@ -1,6 +1,12 @@ -use std::vec; +use std::{sync::Arc, time::Duration, vec}; -use asteroid_mq::{prelude::TopicConfig, protocol::topic::durable_message::LoadTopic}; +use asteroid_mq::{ + prelude::{DurableService, Node, NodeConfig, NodeId, TopicConfig}, + protocol::node::{ + edge::auth::EdgeAuthService, + raft::cluster::{K8sClusterProvider, StaticClusterProvider}, + }, +}; use bios_basic::rbum::{ dto::{rbum_domain_dto::RbumDomainAddReq, rbum_kind_dto::RbumKindAddReq}, rbum_enumeration::RbumScopeLevelKind, @@ -15,10 +21,14 @@ use tardis::{ }; use crate::{ - api::{event_listener_api, event_proc_api, event_topic_api}, - domain::event_topic, + api::{ + ca::{event_connect_api, event_register_api}, + ci::event_topic_api, + }, + domain::{event_message, event_topic}, event_config::{EventConfig, EventInfo, EventInfoManager}, event_constants::{DOMAIN_CODE, KIND_CODE}, + mq_adapter::{BiosDurableAdapter, BiosEdgeAuthAdapter}, }; pub async fn init(web_server: &TardisWebServer) -> TardisResult<()> { @@ -39,7 +49,8 @@ pub async fn init(web_server: &TardisWebServer) -> TardisResult<()> { funs.commit().await?; if config.enable { - init_mq_cluster(&config.svc).await?; + let funs = TardisFuns::inst_with_db_conn(DOMAIN_CODE.to_string(), None); + init_mq_cluster(&config, funs, ctx).await?; } Ok(()) } @@ -52,7 +63,9 @@ async fn init_db(domain_code: String, kind_code: String, funs: &TardisFunsInst, } // Initialize event component RBUM item table and indexs - funs.db().init(event_topic::ActiveModel::init(TardisFuns::reldb().backend(), None, TardisFuns::reldb().compatible_type())).await?; + let _ = funs.db().init(event_topic::ActiveModel::init(TardisFuns::reldb().backend(), None, TardisFuns::reldb().compatible_type())).await; + let _ = funs.db().init(event_message::ActiveModel::init(TardisFuns::reldb().backend(), None, TardisFuns::reldb().compatible_type())).await; + // funs.db() // .init(event_persistent::ActiveModel::init( // TardisFuns::reldb().backend(), @@ -98,36 +111,76 @@ async fn init_api(web_server: &TardisWebServer) -> TardisResult<()> { web_server .add_module( DOMAIN_CODE, - (event_topic_api::EventTopicApi, event_proc_api::EventProcApi, event_listener_api::EventListenerApi), + ( + 
event_topic_api::EventTopicApi, + event_connect_api::EventConnectApi::default(), + event_register_api::EventRegisterApi::default(), + ), ) .await; Ok(()) } -async fn init_mq_cluster(svc_name: &str) -> TardisResult<()> { +async fn init_mq_cluster(config: &EventConfig, funs: TardisFunsInst, ctx: TardisContext) -> TardisResult<()> { use bios_sdk_invoke::clients::event_client::mq_error; - let mq_node = init_mq_node(svc_name).await; - mq_node - .load_topic(LoadTopic { - config: TopicConfig { - code: SPI_RPC_TOPIC, - overflow_config: None, - blocking: false, - }, - queue: vec![], - }) - .await - .map_err(mq_error)?; + let mq_node = init_mq_node(config, funs, ctx).await; + mq_node.load_from_durable_service().await.map_err(mq_error)?; + // it's important to ensure the SPI_RPC_TOPIC is created, many other components depend on it + if mq_node.get_topic(&SPI_RPC_TOPIC).is_none() { + mq_node + .load_topic( + TopicConfig { + code: SPI_RPC_TOPIC, + overflow_config: None, + blocking: false, + }, + vec![], + ) + .await + .map_err(mq_error)?; + } Ok(()) } -pub async fn init_mq_node(svc_name: &str) -> asteroid_mq::prelude::Node { +pub async fn init_mq_node(config: &EventConfig, funs: TardisFunsInst, ctx: TardisContext) -> asteroid_mq::prelude::Node { + let timeout = Duration::from_secs(config.startup_timeout); + const ENV_POD_UID: &str = "POD_UID"; if let Some(node) = TardisFuns::store().get_singleton::() { node } else { - let cluster_provider = asteroid_mq::protocol::cluster::k8s::K8sClusterProvider::new(svc_name.to_string(), asteroid_mq::DEFAULT_TCP_PORT).await; - let uid = std::env::var("POD_UID").expect("POD_UID is required"); - let node = cluster_provider.run(uid).await.expect("failed to run k8s cluster"); + let funs = Arc::new(funs); + let node = match config.cluster.as_deref() { + Some(EventConfig::CLUSTER_K8S) => { + let uid = std::env::var(ENV_POD_UID).expect("POD_UID is required"); + let node = Node::new(NodeConfig { + id: NodeId::sha256(uid.as_bytes()), + raft: config.raft.clone(), + durable: config.durable.then_some(DurableService::new(BiosDurableAdapter::new(funs.clone(), ctx.clone()))), + edge_auth: Some(EdgeAuthService::new(BiosEdgeAuthAdapter::new(funs.clone(), ctx))), + ..Default::default() + }); + let cluster_provider = K8sClusterProvider::new(config.svc.clone(), asteroid_mq::DEFAULT_TCP_PORT).await; + node.init_raft(cluster_provider).await.expect("fail to init raft"); + node + } + Some(EventConfig::NO_CLUSTER) | None => { + let node = Node::new(NodeConfig { + id: NodeId::snowflake(), + raft: config.raft.clone(), + durable: config.durable.then_some(DurableService::new(BiosDurableAdapter::new(funs.clone(), ctx.clone()))), + edge_auth: Some(EdgeAuthService::new(BiosEdgeAuthAdapter::new(funs.clone(), ctx))), + ..Default::default() + }); + // singleton mode + let cluster_provider = StaticClusterProvider::singleton(node.config()); + node.init_raft(cluster_provider).await.expect("fail to init raft"); + node + } + Some(unknown_cluster) => { + panic!("unknown cluster provider {unknown_cluster}") + } + }; + node.raft().await.wait(Some(timeout)).metrics(|rm| rm.state.is_leader() || rm.state.is_follower(), "raft ready").await.expect("fail to wait raft ready"); TardisFuns::store().insert_singleton(node.clone()); node } diff --git a/backend/middlewares/event/src/lib.rs b/backend/middlewares/event/src/lib.rs index 235acfdd1..327a5583e 100644 --- a/backend/middlewares/event/src/lib.rs +++ b/backend/middlewares/event/src/lib.rs @@ -6,4 +6,5 @@ pub mod dto; pub mod event_config; pub mod event_constants; 
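For orientation, once init_mq_node has run, other modules obtain the shared asteroid-mq node from the Tardis singleton store, as event_connect_api.rs does above. A sketch of that lookup pattern only, reusing the calls already shown in this diff; the helper name and error code are illustrative.

use asteroid_mq::prelude::Node;
use tardis::basic::{error::TardisError, result::TardisResult};
use tardis::TardisFuns;

/// Fetch the process-wide mq node stored by init_mq_node, or fail if it is not ready yet.
fn shared_mq_node() -> TardisResult<Node> {
    TardisFuns::store()
        .get_singleton::<Node>()
        .ok_or_else(|| TardisError::internal_error("mq server node has not been initialized", "event-node-not-ready"))
}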
pub mod event_initializer; +pub mod mq_adapter; mod serv; diff --git a/backend/middlewares/event/src/mq_adapter.rs b/backend/middlewares/event/src/mq_adapter.rs new file mode 100644 index 000000000..b3f485473 --- /dev/null +++ b/backend/middlewares/event/src/mq_adapter.rs @@ -0,0 +1,143 @@ +//! The adapter layer between mq and bios services +use std::sync::Arc; + +use asteroid_mq::{ + prelude::{Durable, DurableError, NodeId}, + protocol::node::edge::{ + auth::{EdgeAuth, EdgeAuthError}, + EdgeRequestEnum, + }, +}; +use bios_basic::rbum::serv::rbum_item_serv::RbumItemCrudOperation; +use tardis::{basic::dto::TardisContext, TardisFunsInst}; + +use crate::{ + dto::event_dto::{EventTopicAddOrModifyReq, EventTopicFilterReq}, + serv::{event_auth_serv::EventAuthServ, event_message_serv::EventMessageServ, event_register_serv::EventRegisterServ, event_topic_serv::EventTopicServ}, +}; + +/* +Durable Service Adapter +*/ +pub struct BiosDurableAdapter { + funs: Arc, + message_serv: EventMessageServ, + ctx: TardisContext, +} + +impl BiosDurableAdapter { + const CONTEXT: &str = "bios durable adapter"; + pub fn new(funs: Arc, ctx: TardisContext) -> Self { + Self { + funs, + message_serv: EventMessageServ, + ctx, + } + } +} + +impl Durable for BiosDurableAdapter { + async fn save(&self, topic: asteroid_mq::prelude::TopicCode, message: asteroid_mq::prelude::DurableMessage) -> Result<(), DurableError> { + self.message_serv.save(topic, message, &self.funs).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e)) + } + async fn archive(&self, topic: asteroid_mq::prelude::TopicCode, message_id: asteroid_mq::prelude::MessageId) -> Result<(), asteroid_mq::prelude::DurableError> { + self.message_serv.archive(topic, message_id, &self.funs).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e)) + } + async fn batch_retrieve( + &self, + topic: asteroid_mq::prelude::TopicCode, + query: asteroid_mq::protocol::topic::durable_message::DurableMessageQuery, + ) -> Result, asteroid_mq::prelude::DurableError> { + self.message_serv.batch_retrieve(topic, query, &self.funs).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e)) + } + async fn retrieve( + &self, + topic: asteroid_mq::prelude::TopicCode, + message_id: asteroid_mq::prelude::MessageId, + ) -> Result { + self.message_serv.retrieve(topic, message_id, &self.funs).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e)) + } + async fn update_status( + &self, + topic: asteroid_mq::prelude::TopicCode, + update: asteroid_mq::protocol::node::raft::proposal::MessageStateUpdate, + ) -> Result<(), asteroid_mq::prelude::DurableError> { + self.message_serv.update_status(topic, update, &self.funs).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e)) + } + async fn create_topic(&self, config: asteroid_mq::prelude::TopicConfig) -> Result<(), asteroid_mq::prelude::DurableError> { + let mut req = EventTopicAddOrModifyReq::from_config(config); + EventTopicServ::add_item(&mut req, &self.funs, &self.ctx).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e))?; + Ok(()) + } + async fn delete_topic(&self, topic: asteroid_mq::prelude::TopicCode) -> Result<(), asteroid_mq::prelude::DurableError> { + EventTopicServ::delete_item(&topic.to_string(), &self.funs, &self.ctx).await.map_err(|e| DurableError::with_source(Self::CONTEXT, e))?; + Ok(()) + } + async fn topic_code_list(&self) -> Result, DurableError> { + let ids = EventTopicServ::find_id_items(&EventTopicFilterReq { ..Default::default() }, None, None, &self.funs, &self.ctx) + .await + 
.map_err(|e| DurableError::with_source(Self::CONTEXT, e))?; + Ok(ids.into_iter().map(asteroid_mq::prelude::TopicCode::from).collect()) + } + async fn topic_list(&self) -> Result, DurableError> { + let items = EventTopicServ::find_items(&EventTopicFilterReq { ..Default::default() }, None, None, &self.funs, &self.ctx) + .await + .map_err(|e| DurableError::with_source(Self::CONTEXT, e))?; + Ok(items.into_iter().map(|item| item.into_topic_config()).collect()) + } +} + +/* +Auth Service Adapter +*/ + +pub struct BiosEdgeAuthAdapter { + funs: Arc, + ctx: TardisContext, + auth_serv: EventAuthServ, + register_serv: EventRegisterServ, +} + +impl BiosEdgeAuthAdapter { + pub fn new(funs: Arc, ctx: TardisContext) -> Self { + Self { + funs, + ctx, + auth_serv: EventAuthServ {}, + register_serv: EventRegisterServ {}, + } + } +} + +impl EdgeAuth for BiosEdgeAuthAdapter { + async fn check<'r>(&'r self, from: NodeId, request: &'r EdgeRequestEnum) -> Result<(), EdgeAuthError> { + let funs = &self.funs; + let ctx = &self.ctx; + enum CheckOption { + Write, + Read, + } + + let (topic, check_option) = match request { + EdgeRequestEnum::SendMessage(edge_message) => (edge_message.header.topic.clone(), CheckOption::Write), + EdgeRequestEnum::EndpointOnline(edge_endpoint_online) => (edge_endpoint_online.topic_code.clone(), CheckOption::Read), + EdgeRequestEnum::EndpointOffline(edge_endpoint_offline) => (edge_endpoint_offline.topic_code.clone(), CheckOption::Read), + EdgeRequestEnum::EndpointInterest(endpoint_interest) => (endpoint_interest.topic_code.clone(), CheckOption::Read), + EdgeRequestEnum::SetState(set_state) => (set_state.topic.clone(), CheckOption::Read), + }; + if !EventTopicServ::is_check_auth(&topic, funs, ctx).await.map_err(|e| EdgeAuthError::new("topic not found", e))? 
{ + return Ok(()); + } + let ctx = self.register_serv.get_ctx(from).await.map_err(|e| EdgeAuthError::new("node_id not registered", e))?; + let auth = self.auth_serv.get_auth(topic, &ctx.ak, funs).await.map_err(|e| EdgeAuthError::new("auth not found", e))?; + + if match check_option { + CheckOption::Write => auth.write, + CheckOption::Read => auth.read, + } { + Ok(()) + } else { + Err(EdgeAuthError::new_local("no write permission")) + } + } +} diff --git a/backend/middlewares/event/src/serv.rs b/backend/middlewares/event/src/serv.rs index eddea19ba..c8ac2f26c 100644 --- a/backend/middlewares/event/src/serv.rs +++ b/backend/middlewares/event/src/serv.rs @@ -1,3 +1,5 @@ -pub mod event_listener_serv; -pub mod event_proc_serv; +pub mod event_auth_serv; +pub mod event_connect_serv; +pub mod event_message_serv; +pub mod event_register_serv; pub mod event_topic_serv; diff --git a/backend/middlewares/event/src/serv/event_auth_serv.rs b/backend/middlewares/event/src/serv/event_auth_serv.rs new file mode 100644 index 000000000..a77615133 --- /dev/null +++ b/backend/middlewares/event/src/serv/event_auth_serv.rs @@ -0,0 +1,39 @@ +use asteroid_mq::prelude::TopicCode; +use tardis::{ + basic::{error::TardisError, result::TardisResult}, + db::sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter}, + TardisFunsInst, +}; + +use crate::{ + domain::event_auth::{ActiveModel, Column, Entity, Model}, + dto::event_dto::TopicAuth, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + +pub struct EventAuthServ {} + +impl EventAuthServ { + pub fn new() -> EventAuthServ { + EventAuthServ {} + } + + pub async fn set_auth(&self, auth: TopicAuth, funs: &TardisFunsInst) -> TardisResult<()> { + let model: Model = Model::from_topic_auth(auth); + let model: ActiveModel = model.into_active_model(); + let conn = funs.reldb().conn(); + let raw_conn = conn.raw_conn(); + let _result = model.insert(raw_conn).await?; + Ok(()) + } + + pub async fn get_auth(&self, topic: TopicCode, ak: &str, funs: &TardisFunsInst) -> TardisResult { + let select = Entity::find().filter(Column::Topic.eq(topic.to_string())).filter(Column::Ak.eq(ak)); + let conn = funs.reldb().conn(); + let raw_conn = conn.raw_conn(); + let model = select.one(raw_conn).await?; + let model = model.ok_or_else(|| TardisError::not_found(&format!("auth for topic {} and ak {} not found", topic, ak), "event-auth-not-found"))?; + Ok(model.into_topic_auth()) + } +} diff --git a/backend/middlewares/event/src/serv/event_connect_serv.rs b/backend/middlewares/event/src/serv/event_connect_serv.rs new file mode 100644 index 000000000..28841f536 --- /dev/null +++ b/backend/middlewares/event/src/serv/event_connect_serv.rs @@ -0,0 +1,80 @@ +use std::task::ready; + +use asteroid_mq::protocol::node::edge::{ + codec::CodecKind, + connection::{NodeConnection, NodeConnectionError, NodeConnectionErrorKind}, + packet::EdgePacket, +}; +use tardis::{ + futures::{Sink, Stream}, + log as tracing, + web::poem::web::websocket::{Message, WebSocketStream}, +}; +pin_project_lite::pin_project! 
{ + pub struct PoemWs { + #[pin] + inner: WebSocketStream, + } +} +impl PoemWs { + pub fn new(inner: WebSocketStream) -> Self { + Self { inner } + } +} +impl Sink for PoemWs { + type Error = NodeConnectionError; + + fn poll_ready(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll> { + self.project().inner.poll_ready(cx).map_err(|e| NodeConnectionError::new(NodeConnectionErrorKind::Underlying(Box::new(e)), "web socket poll ready failed")) + } + + fn start_send(self: std::pin::Pin<&mut Self>, item: EdgePacket) -> Result<(), Self::Error> { + self.project() + .inner + .start_send(Message::Binary(item.payload.to_vec())) + .map_err(|e| NodeConnectionError::new(NodeConnectionErrorKind::Underlying(Box::new(e)), "web socket start send failed")) + } + + fn poll_flush(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll> { + self.project().inner.poll_flush(cx).map_err(|e| NodeConnectionError::new(NodeConnectionErrorKind::Underlying(Box::new(e)), "web socket poll flush failed")) + } + + fn poll_close(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll> { + self.project().inner.poll_close(cx).map_err(|e| NodeConnectionError::new(NodeConnectionErrorKind::Underlying(Box::new(e)), "web socket poll close failed")) + } +} + +impl Stream for PoemWs { + type Item = Result; + + fn poll_next(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll> { + let next = ready!(self.project().inner.poll_next(cx)); + match next { + Some(Ok(Message::Binary(data))) => { + let packet = EdgePacket::new(CodecKind::JSON, data); + std::task::Poll::Ready(Some(Ok(packet))) + } + Some(Ok(Message::Text(data))) => { + let packet = EdgePacket::new(CodecKind::JSON, data); + std::task::Poll::Ready(Some(Ok(packet))) + } + Some(Ok(Message::Close(_))) => { + tracing::debug!("received close message"); + std::task::Poll::Ready(None) + } + Some(Ok(p)) => { + tracing::debug!(?p, "unexpected message type"); + // immediately wake up the task to poll next + cx.waker().wake_by_ref(); + std::task::Poll::Pending + } + Some(Err(e)) => std::task::Poll::Ready(Some(Err(NodeConnectionError::new( + NodeConnectionErrorKind::Underlying(Box::new(e)), + "web socket poll next failed", + )))), + None => std::task::Poll::Ready(None), + } + } +} + +impl NodeConnection for PoemWs {} diff --git a/backend/middlewares/event/src/serv/event_listener_serv.rs b/backend/middlewares/event/src/serv/event_listener_serv.rs deleted file mode 100644 index 8b1378917..000000000 --- a/backend/middlewares/event/src/serv/event_listener_serv.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/backend/middlewares/event/src/serv/event_message_serv.rs b/backend/middlewares/event/src/serv/event_message_serv.rs new file mode 100644 index 000000000..3c568603a --- /dev/null +++ b/backend/middlewares/event/src/serv/event_message_serv.rs @@ -0,0 +1,69 @@ +use asteroid_mq::{ + prelude::{DurableMessage, MessageId, TopicCode}, + protocol::{node::raft::proposal::MessageStateUpdate, topic::durable_message::DurableMessageQuery}, +}; +use tardis::{ + basic::{error::TardisError, result::TardisResult}, + db::sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QuerySelect, Set, Unchanged}, + TardisFunsInst, +}; + +use crate::domain::event_message::{ActiveModel, Column, Entity, Model}; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EventMessageServ; + +impl EventMessageServ { + pub async fn save(&self, topic: TopicCode, message: 
+        let model: Model = Model::from_durable_message(topic, message);
+        let model: ActiveModel = model.into_active_model();
+        let conn = funs.reldb().conn();
+        let raw_conn = conn.raw_conn();
+        let _result = model.insert(raw_conn).await?;
+        Ok(())
+    }
+    pub async fn archive(&self, topic: TopicCode, message_id: MessageId, funs: &TardisFunsInst) -> TardisResult<()> {
+        let update = Entity::update(ActiveModel {
+            message_id: Unchanged(message_id.to_base64()),
+            archived: Set(true),
+            ..Default::default()
+        })
+        .filter(Column::Topic.eq(topic.to_string()));
+        let conn = funs.reldb().conn();
+        let raw_conn = conn.raw_conn();
+        update.exec(raw_conn).await?;
+        Ok(())
+    }
+    pub async fn batch_retrieve(&self, topic: TopicCode, query: DurableMessageQuery, funs: &TardisFunsInst) -> TardisResult<Vec<DurableMessage>> {
+        let DurableMessageQuery { limit, offset, .. } = query;
+        let select = Entity::find().filter(Column::Archived.eq(false)).filter(Column::Topic.eq(topic.to_string())).limit(Some(limit as u64)).offset(Some(offset as u64));
+        let conn = funs.reldb().conn();
+        let raw_conn = conn.raw_conn();
+        let models = select.all(raw_conn).await?;
+        models.into_iter().map(|model| model.try_into_durable_message()).collect::<TardisResult<Vec<DurableMessage>>>()
+    }
+    pub async fn retrieve(&self, topic: TopicCode, message_id: MessageId, funs: &TardisFunsInst) -> TardisResult<DurableMessage> {
+        let select = Entity::find().filter(Column::Archived.eq(false)).filter(Column::Topic.eq(topic.to_string())).filter(Column::MessageId.eq(message_id.to_base64()));
+        let conn = funs.reldb().conn();
+        let raw_conn = conn.raw_conn();
+        let model = select.one(raw_conn).await?;
+        model.ok_or_else(|| TardisError::not_found(&format!("event message {} not found", message_id), "event-message-not-found"))?.try_into_durable_message()
+    }
+    pub async fn update_status(&self, topic: TopicCode, update: MessageStateUpdate, funs: &TardisFunsInst) -> TardisResult<()> {
+        let MessageStateUpdate { message_id, status, .. } = update;
+        let select = Entity::find().filter(Column::Archived.eq(false)).filter(Column::Topic.eq(topic.to_string())).filter(Column::MessageId.eq(message_id.to_base64()));
+        let conn = funs.reldb().conn();
+        let raw_conn = conn.raw_conn();
+        let model = select.one(raw_conn).await?;
+        let mut model = model.ok_or_else(|| TardisError::not_found(&format!("event message {} not found", message_id), "event-message-not-found"))?;
+        model.status_update(status);
+        Entity::update(ActiveModel {
+            message_id: Unchanged(message_id.to_base64()),
+            status: Set(model.status),
+            ..Default::default()
+        })
+        .filter(Column::Topic.eq(topic.to_string()))
+        .exec(raw_conn)
+        .await?;
+        Ok(())
+    }
+}
diff --git a/backend/middlewares/event/src/serv/event_proc_serv.rs b/backend/middlewares/event/src/serv/event_proc_serv.rs
deleted file mode 100644
index 8b1378917..000000000
--- a/backend/middlewares/event/src/serv/event_proc_serv.rs
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/backend/middlewares/event/src/serv/event_register_serv.rs b/backend/middlewares/event/src/serv/event_register_serv.rs
new file mode 100644
index 000000000..11aea5287
--- /dev/null
+++ b/backend/middlewares/event/src/serv/event_register_serv.rs
@@ -0,0 +1,28 @@
+use std::collections::HashMap;
+
+use asteroid_mq::prelude::NodeId;
+use tardis::tokio::sync::RwLock;
+use tardis::{
+    basic::{dto::TardisContext, error::TardisError, result::TardisResult},
+    tardis_static,
+};
+#[derive(Clone, Default, Debug)]
+pub struct EventRegisterServ {}
+tardis_static!
{ + #[inline] + pub nid_ctx_map: RwLock>; +} +impl EventRegisterServ { + pub async fn register_ctx(&self, ctx: &TardisContext) -> TardisResult { + let id = NodeId::snowflake(); + nid_ctx_map().write().await.insert(id, ctx.clone()); + Ok(id) + } + pub async fn unregister_ctx(&self, id: NodeId) -> TardisResult<()> { + nid_ctx_map().write().await.remove(&id); + Ok(()) + } + pub async fn get_ctx(&self, id: NodeId) -> TardisResult { + nid_ctx_map().read().await.get(&id).cloned().ok_or_else(|| TardisError::not_found("node not found", "event-node-not-found")) + } +} diff --git a/backend/middlewares/event/src/serv/event_topic_serv.rs b/backend/middlewares/event/src/serv/event_topic_serv.rs index f55dbc352..d41b0d1ac 100644 --- a/backend/middlewares/event/src/serv/event_topic_serv.rs +++ b/backend/middlewares/event/src/serv/event_topic_serv.rs @@ -1,4 +1,10 @@ +use std::collections::HashMap; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use asteroid_mq::prelude::TopicCode; use async_trait::async_trait; +use bios_basic::rbum::dto::rbum_filer_dto::RbumBasicFilterReq; use bios_basic::rbum::dto::rbum_item_dto::{RbumItemKernelAddReq, RbumItemKernelModifyReq}; use bios_basic::rbum::rbum_enumeration::RbumScopeLevelKind; use bios_basic::rbum::serv::rbum_item_serv::RbumItemCrudOperation; @@ -8,17 +14,20 @@ use tardis::basic::error::TardisError; use tardis::basic::result::TardisResult; use tardis::db::sea_orm::sea_query::SelectStatement; use tardis::db::sea_orm::{EntityName, Set}; +use tardis::tokio::sync::RwLock; use tardis::TardisFunsInst; use crate::domain::event_topic; -use crate::dto::event_dto::{EventTopicAddOrModifyReq, EventTopicFilterReq, EventTopicInfoResp}; +use crate::dto::event_dto::{EventTopicAddOrModifyReq, EventTopicFilterReq, EventTopicInfoResp, SetTopicAuth, TopicAuth}; use crate::event_config::EventInfoManager; -pub struct EventDefServ; +use super::event_auth_serv::EventAuthServ; + +pub struct EventTopicServ; #[async_trait] impl RbumItemCrudOperation - for EventDefServ + for EventTopicServ { fn get_ext_table_name() -> &'static str { event_topic::Entity.table_name() @@ -48,6 +57,7 @@ impl RbumItemCrudOperation TardisResult<()> { let _key = add_req.code.to_string(); let value = Self::get_item(id, &EventTopicFilterReq::default(), funs, ctx).await?; - mq_node().new_topic(value.into_topic_config()).await.map_err(|e| TardisError::internal_error(&e.to_string(), "event-fail-to-create-topic"))?; + mq_node().create_new_topic(value.into_topic_config()).await.map_err(|e| TardisError::internal_error(&e.to_string(), "event-fail-to-create-topic"))?; Ok(()) } @@ -73,6 +83,7 @@ impl RbumItemCrudOperation TardisResult { + const EXPIRE_DURATION: Duration = Duration::from_secs(60); + tardis::tardis_static! { + cache: Arc>>; + } + let now = Instant::now(); + // try query from cache + if let Some((expire, check_auth)) = cache().read().await.get(code) { + if *expire > now { + return Ok(*check_auth); + } + } + let resp = Self::find_one_item( + &EventTopicFilterReq { + basic: RbumBasicFilterReq { + code: Some(code.to_string()), + ..Default::default() + }, + }, + funs, + ctx, + ) + .await? 
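+        // cache miss: fall through to a storage lookup; the flag read below is cached for EXPIRE_DURATION before the next lookup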
+ .ok_or_else(|| TardisError::not_found("topic not found", "event-topic-not-found"))?; + let expire = now + EXPIRE_DURATION; + cache().write().await.insert(code.clone(), (expire, resp.check_auth)); + Ok(resp.check_auth) + } pub async fn init(funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> { // let defs = Self::find_items(&EventTopicFilterReq::default(), None, None, funs, ctx).await?; + Ok(()) + } + pub async fn register_user(set_topic_auth: SetTopicAuth, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> { + EventAuthServ::new() + .set_auth( + TopicAuth { + topic: set_topic_auth.topic, + ak: ctx.own_paths.clone(), + read: set_topic_auth.read, + write: set_topic_auth.write, + }, + funs, + ) + .await?; + Ok(()) } } diff --git a/backend/middlewares/event/test-vue/.gitignore b/backend/middlewares/event/test-vue/.gitignore deleted file mode 100644 index 38adffa64..000000000 --- a/backend/middlewares/event/test-vue/.gitignore +++ /dev/null @@ -1,28 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -node_modules -.DS_Store -dist -dist-ssr -coverage -*.local - -/cypress/videos/ -/cypress/screenshots/ - -# Editor directories and files -.vscode/* -!.vscode/extensions.json -.idea -*.suo -*.ntvs* -*.njsproj -*.sln -*.sw? diff --git a/backend/middlewares/event/test-vue/env.d.ts b/backend/middlewares/event/test-vue/env.d.ts deleted file mode 100644 index 11f02fe2a..000000000 --- a/backend/middlewares/event/test-vue/env.d.ts +++ /dev/null @@ -1 +0,0 @@ -/// diff --git a/backend/middlewares/event/test-vue/index.html b/backend/middlewares/event/test-vue/index.html deleted file mode 100644 index e2a59a91c..000000000 --- a/backend/middlewares/event/test-vue/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - Vite App - - -
- - - diff --git a/backend/middlewares/event/test-vue/package.json b/backend/middlewares/event/test-vue/package.json deleted file mode 100644 index 70c797121..000000000 --- a/backend/middlewares/event/test-vue/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "event-test", - "version": "0.0.0", - "private": true, - "scripts": { - "dev": "vite --force --host 0.0.0.0", - "build": "run-p type-check build-only", - "preview": "vite preview", - "build-only": "vite build", - "type-check": "vue-tsc --noEmit" - }, - "dependencies": { - "axios": "^1.6.0", - "vue": "^3.2.47", - "vue-router": "^4.1.6" - }, - "devDependencies": { - "@types/node": "^18.14.2", - "@vitejs/plugin-vue": "^4.0.0", - "@vue/tsconfig": "^0.1.3", - "npm-run-all": "^4.1.5", - "typescript": "~4.8.4", - "vite": "^4.5.2", - "vue-tsc": "^1.2.0", - "vite-plugin-top-level-await": "^1.3.0", - "vite-plugin-wasm": "^3.2.2" - } -} \ No newline at end of file diff --git a/backend/middlewares/event/test-vue/src/App.vue b/backend/middlewares/event/test-vue/src/App.vue deleted file mode 100644 index 70d8f2b4c..000000000 --- a/backend/middlewares/event/test-vue/src/App.vue +++ /dev/null @@ -1,33 +0,0 @@ - - - - - diff --git a/backend/middlewares/event/test-vue/src/main.ts b/backend/middlewares/event/test-vue/src/main.ts deleted file mode 100644 index 9cfe6dfb7..000000000 --- a/backend/middlewares/event/test-vue/src/main.ts +++ /dev/null @@ -1,54 +0,0 @@ -// import * as bios from "bios-enhance-wasm" -import { createApp } from 'vue' -import App from './App.vue' -import router from './router' - -// console.log('--------Init---------'); -// await bios.main('', { -// "strict_security_mode": false, -// "pub_key": "02fbba662032fd384079b7824c07ec8eeaac615187e27ce6a58fcd1597105c1065", -// "double_auth_exp_sec": 60, -// "apis": [ -// { -// "action": "get", -// "uri": "iam/ct/need_crypto_req/**", -// "need_crypto_req": true, -// "need_crypto_resp": false, -// "need_double_auth": false -// }, -// { -// "action": "get", -// "uri": "iam/ct/need_crypto_resp/**", -// "need_crypto_req": false, -// "need_crypto_resp": true, -// "need_double_auth": false -// }, -// { -// "action": "get", -// "uri": "iam/cs/**", -// "need_crypto_req": true, -// "need_crypto_resp": true, -// "need_double_auth": false -// }, -// { -// "action": "get", -// "uri": "iam/ct/need_double_auth/**", -// "need_crypto_req": false, -// "need_crypto_resp": false, -// "need_double_auth": true -// }], -// "login_req_method": "put", -// "login_req_paths": ["iam/cp/login/userpwd"], -// "logout_req_method": "delete", -// "logout_req_path": "iam/cp/logout/", -// "double_auth_req_method": "put", -// "double_auth_req_path": "iam/cp/login/check", -// }); - -// console.log('--------Init END---------' + bios.encrypt("test data")); - -const app = createApp(App) - -app.use(router) - -app.mount('#app') diff --git a/backend/middlewares/event/test-vue/src/request/http.ts b/backend/middlewares/event/test-vue/src/request/http.ts deleted file mode 100644 index e961bab1b..000000000 --- a/backend/middlewares/event/test-vue/src/request/http.ts +++ /dev/null @@ -1,33 +0,0 @@ -import axios from "axios"; -// import * as bios from "bios-enhance-wasm"; - -const apiClient = axios.create({ - headers: { - "Content-type": "application/json", - }, -}); - -apiClient.interceptors.request.use( - function (config) { - return config; - }, - function (error) { - return Promise.reject(error); - } -); - -apiClient.interceptors.response.use( - function (response) { - console.log("======" + JSON.stringify(response)); - const url 
= new URL(response.config.url!); - const path = url.pathname + (url.search != "" ? url.search : ""); - if (typeof response.data !== "undefined" && response.data !== "") { - } - return response; - }, - function (error) { - return Promise.reject(error); - } -); - -export default apiClient; diff --git a/backend/middlewares/event/test-vue/src/router/index.ts b/backend/middlewares/event/test-vue/src/router/index.ts deleted file mode 100644 index 768a6e638..000000000 --- a/backend/middlewares/event/test-vue/src/router/index.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { createRouter, createWebHistory } from "vue-router"; -// import * as bios from "bios-enhance-wasm"; -import EventRegister from "../views/EventRegister.vue"; - -const router = createRouter({ - history: createWebHistory(import.meta.env.BASE_URL), - routes: [ - { - path: "/", - name: "eventRegister", - component: EventRegister, - }, - ], -}); - -router.beforeEach((to, from) => { - if (to.matched.length === 0) { - try { - // const fullPath = bios.decrypt(to.fullPath.substring(1)); - router.replace({ path: to.fullPath.substring(1) }); - } catch (ignore) {} - } - return true; -}); - -export default router; diff --git a/backend/middlewares/event/test-vue/src/views/EventRegister.vue b/backend/middlewares/event/test-vue/src/views/EventRegister.vue deleted file mode 100644 index 6534a00e3..000000000 --- a/backend/middlewares/event/test-vue/src/views/EventRegister.vue +++ /dev/null @@ -1,253 +0,0 @@ - - - - diff --git a/backend/middlewares/event/test-vue/src/vue-router.mjs_modify b/backend/middlewares/event/test-vue/src/vue-router.mjs_modify deleted file mode 100644 index 2f7839c78..000000000 --- a/backend/middlewares/event/test-vue/src/vue-router.mjs_modify +++ /dev/null @@ -1,3619 +0,0 @@ -/*! - * vue-router v4.1.6 - * (c) 2022 Eduardo San Martin Morote - * @license MIT - */ -import { getCurrentInstance, inject, onUnmounted, onDeactivated, onActivated, computed, unref, watchEffect, defineComponent, reactive, h, provide, ref, watch, shallowRef, nextTick } from 'vue'; -import { setupDevtoolsPlugin } from '@vue/devtools-api'; -import * as bios from "bios-enhance-wasm" - -const isBrowser = typeof window !== 'undefined'; - -function isESModule(obj) { - return obj.__esModule || obj[Symbol.toStringTag] === 'Module'; -} -const assign = Object.assign; -function applyToParams(fn, params) { - const newParams = {}; - for (const key in params) { - const value = params[key]; - newParams[key] = isArray(value) - ? value.map(fn) - : fn(value); - } - return newParams; -} -const noop = () => { }; -/** - * Typesafe alternative to Array.isArray - * https://github.com/microsoft/TypeScript/pull/48228 - */ -const isArray = Array.isArray; - -function warn(msg) { - // avoid using ...args as it breaks in older Edge builds - const args = Array.from(arguments).slice(1); - console.warn.apply(console, ['[Vue Router warn]: ' + msg].concat(args)); -} - -const TRAILING_SLASH_RE = /\/$/; -const removeTrailingSlash = (path) => path.replace(TRAILING_SLASH_RE, ''); -/** - * Transforms a URI into a normalized history location - * - * @param parseQuery - * @param location - URI to normalize - * @param currentLocation - current absolute location. Allows resolving relative - * paths. Must start with `/`. 
Defaults to `/` - * @returns a normalized history location - */ -function parseURL(parseQuery, location, currentLocation = '/') { - let path, query = {}, searchString = '', hash = ''; - // Could use URL and URLSearchParams but IE 11 doesn't support it - // TODO: move to new URL() - const hashPos = location.indexOf('#'); - let searchPos = location.indexOf('?'); - // the hash appears before the search, so it's not part of the search string - if (hashPos < searchPos && hashPos >= 0) { - searchPos = -1; - } - if (searchPos > -1) { - path = location.slice(0, searchPos); - searchString = location.slice(searchPos + 1, hashPos > -1 ? hashPos : location.length); - query = parseQuery(searchString); - } - if (hashPos > -1) { - path = path || location.slice(0, hashPos); - // keep the # character - hash = location.slice(hashPos, location.length); - } - // no search and no query - path = resolveRelativePath(path != null ? path : location, currentLocation); - // empty path means a relative query or hash `?foo=f`, `#thing` - return { - fullPath: path + (searchString && '?') + searchString + hash, - path, - query, - hash, - }; -} -/** - * Stringifies a URL object - * - * @param stringifyQuery - * @param location - */ -function stringifyURL(stringifyQuery, location) { - const query = location.query ? stringifyQuery(location.query) : ''; - return location.path + (query && '?') + query + (location.hash || ''); -} -/** - * Strips off the base from the beginning of a location.pathname in a non-case-sensitive way. - * - * @param pathname - location.pathname - * @param base - base to strip off - */ -function stripBase(pathname, base) { - // no base or base is not found at the beginning - if (!base || !pathname.toLowerCase().startsWith(base.toLowerCase())) - return pathname; - return pathname.slice(base.length) || '/'; -} -/** - * Checks if two RouteLocation are equal. This means that both locations are - * pointing towards the same {@link RouteRecord} and that all `params`, `query` - * parameters and `hash` are the same - * - * @param a - first {@link RouteLocation} - * @param b - second {@link RouteLocation} - */ -function isSameRouteLocation(stringifyQuery, a, b) { - const aLastIndex = a.matched.length - 1; - const bLastIndex = b.matched.length - 1; - return (aLastIndex > -1 && - aLastIndex === bLastIndex && - isSameRouteRecord(a.matched[aLastIndex], b.matched[bLastIndex]) && - isSameRouteLocationParams(a.params, b.params) && - stringifyQuery(a.query) === stringifyQuery(b.query) && - a.hash === b.hash); -} -/** - * Check if two `RouteRecords` are equal. Takes into account aliases: they are - * considered equal to the `RouteRecord` they are aliasing. - * - * @param a - first {@link RouteRecord} - * @param b - second {@link RouteRecord} - */ -function isSameRouteRecord(a, b) { - // since the original record has an undefined value for aliasOf - // but all aliases point to the original record, this will always compare - // the original record - return (a.aliasOf || a) === (b.aliasOf || b); -} -function isSameRouteLocationParams(a, b) { - if (Object.keys(a).length !== Object.keys(b).length) - return false; - for (const key in a) { - if (!isSameRouteLocationParamsValue(a[key], b[key])) - return false; - } - return true; -} -function isSameRouteLocationParamsValue(a, b) { - return isArray(a) - ? isEquivalentArray(a, b) - : isArray(b) - ? isEquivalentArray(b, a) - : a === b; -} -/** - * Check if two arrays are the same or if an array with one single entry is the - * same as another primitive value. 
Used to check query and parameters - * - * @param a - array of values - * @param b - array of values or a single value - */ -function isEquivalentArray(a, b) { - return isArray(b) - ? a.length === b.length && a.every((value, i) => value === b[i]) - : a.length === 1 && a[0] === b; -} -/** - * Resolves a relative path that starts with `.`. - * - * @param to - path location we are resolving - * @param from - currentLocation.path, should start with `/` - */ -function resolveRelativePath(to, from) { - if (to.startsWith('/')) - return to; - if ((process.env.NODE_ENV !== 'production') && !from.startsWith('/')) { - warn(`Cannot resolve a relative location without an absolute path. Trying to resolve "${to}" from "${from}". It should look like "/${from}".`); - return to; - } - if (!to) - return from; - const fromSegments = from.split('/'); - const toSegments = to.split('/'); - let position = fromSegments.length - 1; - let toPosition; - let segment; - for (toPosition = 0; toPosition < toSegments.length; toPosition++) { - segment = toSegments[toPosition]; - // we stay on the same position - if (segment === '.') - continue; - // go up in the from array - if (segment === '..') { - // we can't go below zero, but we still need to increment toPosition - if (position > 1) - position--; - // continue - } - // we reached a non-relative path, we stop here - else - break; - } - return (fromSegments.slice(0, position).join('/') + - '/' + - toSegments - // ensure we use at least the last element in the toSegments - .slice(toPosition - (toPosition === toSegments.length ? 1 : 0)) - .join('/')); -} - -var NavigationType; -(function (NavigationType) { - NavigationType["pop"] = "pop"; - NavigationType["push"] = "push"; -})(NavigationType || (NavigationType = {})); -var NavigationDirection; -(function (NavigationDirection) { - NavigationDirection["back"] = "back"; - NavigationDirection["forward"] = "forward"; - NavigationDirection["unknown"] = ""; -})(NavigationDirection || (NavigationDirection = {})); -/** - * Starting location for Histories - */ -const START = ''; -// Generic utils -/** - * Normalizes a base by removing any trailing slash and reading the base tag if - * present. 
- * - * @param base - base to normalize - */ -function normalizeBase(base) { - if (!base) { - if (isBrowser) { - // respect tag - const baseEl = document.querySelector('base'); - base = (baseEl && baseEl.getAttribute('href')) || '/'; - // strip full URL origin - base = base.replace(/^\w+:\/\/[^\/]+/, ''); - } - else { - base = '/'; - } - } - // ensure leading slash when it was removed by the regex above avoid leading - // slash with hash because the file could be read from the disk like file:// - // and the leading slash would cause problems - if (base[0] !== '/' && base[0] !== '#') - base = '/' + base; - // remove the trailing slash so all other method can just do `base + fullPath` - // to build an href - return removeTrailingSlash(base); -} -// remove any character before the hash -const BEFORE_HASH_RE = /^[^#]+#/; -function createHref(base, location) { - return base.replace(BEFORE_HASH_RE, '#') + location; -} - -function getElementPosition(el, offset) { - const docRect = document.documentElement.getBoundingClientRect(); - const elRect = el.getBoundingClientRect(); - return { - behavior: offset.behavior, - left: elRect.left - docRect.left - (offset.left || 0), - top: elRect.top - docRect.top - (offset.top || 0), - }; -} -const computeScrollPosition = () => ({ - left: window.pageXOffset, - top: window.pageYOffset, -}); -function scrollToPosition(position) { - let scrollToOptions; - if ('el' in position) { - const positionEl = position.el; - const isIdSelector = typeof positionEl === 'string' && positionEl.startsWith('#'); - /** - * `id`s can accept pretty much any characters, including CSS combinators - * like `>` or `~`. It's still possible to retrieve elements using - * `document.getElementById('~')` but it needs to be escaped when using - * `document.querySelector('#\\~')` for it to be valid. The only - * requirements for `id`s are them to be unique on the page and to not be - * empty (`id=""`). Because of that, when passing an id selector, it should - * be properly escaped for it to work with `querySelector`. We could check - * for the id selector to be simple (no CSS combinators `+ >~`) but that - * would make things inconsistent since they are valid characters for an - * `id` but would need to be escaped when using `querySelector`, breaking - * their usage and ending up in no selector returned. Selectors need to be - * escaped: - * - * - `#1-thing` becomes `#\31 -thing` - * - `#with~symbols` becomes `#with\\~symbols` - * - * - More information about the topic can be found at - * https://mathiasbynens.be/notes/html5-id-class. - * - Practical example: https://mathiasbynens.be/demo/html5-id - */ - if ((process.env.NODE_ENV !== 'production') && typeof position.el === 'string') { - if (!isIdSelector || !document.getElementById(position.el.slice(1))) { - try { - const foundEl = document.querySelector(position.el); - if (isIdSelector && foundEl) { - warn(`The selector "${position.el}" should be passed as "el: document.querySelector('${position.el}')" because it starts with "#".`); - // return to avoid other warnings - return; - } - } - catch (err) { - warn(`The selector "${position.el}" is invalid. If you are using an id selector, make sure to escape it. You can find more information about escaping characters in selectors at https://mathiasbynens.be/notes/css-escapes or use CSS.escape (https://developer.mozilla.org/en-US/docs/Web/API/CSS/escape).`); - // return to avoid other warnings - return; - } - } - } - const el = typeof positionEl === 'string' - ? isIdSelector - ? 
document.getElementById(positionEl.slice(1)) - : document.querySelector(positionEl) - : positionEl; - if (!el) { - (process.env.NODE_ENV !== 'production') && - warn(`Couldn't find element using selector "${position.el}" returned by scrollBehavior.`); - return; - } - scrollToOptions = getElementPosition(el, position); - } - else { - scrollToOptions = position; - } - if ('scrollBehavior' in document.documentElement.style) - window.scrollTo(scrollToOptions); - else { - window.scrollTo(scrollToOptions.left != null ? scrollToOptions.left : window.pageXOffset, scrollToOptions.top != null ? scrollToOptions.top : window.pageYOffset); - } -} -function getScrollKey(path, delta) { - const position = history.state ? history.state.position - delta : -1; - return position + path; -} -const scrollPositions = new Map(); -function saveScrollPosition(key, scrollPosition) { - scrollPositions.set(key, scrollPosition); -} -function getSavedScrollPosition(key) { - const scroll = scrollPositions.get(key); - // consume it so it's not used again - scrollPositions.delete(key); - return scroll; -} -// TODO: RFC about how to save scroll position -/** - * ScrollBehavior instance used by the router to compute and restore the scroll - * position when navigating. - */ -// export interface ScrollHandler { -// // returns a scroll position that can be saved in history -// compute(): ScrollPositionEntry -// // can take an extended ScrollPositionEntry -// scroll(position: ScrollPosition): void -// } -// export const scrollHandler: ScrollHandler = { -// compute: computeScroll, -// scroll: scrollToPosition, -// } - -let createBaseLocation = () => location.protocol + '//' + location.host; -/** - * Creates a normalized history location from a window.location object - * @param location - - */ -function createCurrentLocation(base, location) { - const { pathname, search, hash } = location; - // allows hash bases like #, /#, #/, #!, #!/, /#!/, or even /folder#end - const hashPos = base.indexOf('#'); - if (hashPos > -1) { - let slicePos = hash.includes(base.slice(hashPos)) - ? base.slice(hashPos).length - : 1; - let pathFromHash = hash.slice(slicePos); - // prepend the starting slash to hash so the url starts with /# - if (pathFromHash[0] !== '/') - pathFromHash = '/' + pathFromHash; - return stripBase(pathFromHash, ''); - } - const path = stripBase(pathname, base); - return path + search + hash; -} -function useHistoryListeners(base, historyState, currentLocation, replace) { - let listeners = []; - let teardowns = []; - // TODO: should it be a stack? a Dict. Check if the popstate listener - // can trigger twice - let pauseState = null; - const popStateHandler = ({ state, }) => { - const to = createCurrentLocation(base, location); - const from = currentLocation.value; - const fromState = historyState.value; - let delta = 0; - if (state) { - currentLocation.value = to; - historyState.value = state; - // ignore the popstate and reset the pauseState - if (pauseState && pauseState === from) { - pauseState = null; - return; - } - delta = fromState ? state.position - fromState.position : 0; - } - else { - replace(to); - } - // console.log({ deltaFromCurrent }) - // Here we could also revert the navigation by calling history.go(-delta) - // this listener will have to be adapted to not trigger again and to wait for the url - // to be updated before triggering the listeners. 
Some kind of validation function would also - // need to be passed to the listeners so the navigation can be accepted - // call all listeners - listeners.forEach(listener => { - listener(currentLocation.value, from, { - delta, - type: NavigationType.pop, - direction: delta - ? delta > 0 - ? NavigationDirection.forward - : NavigationDirection.back - : NavigationDirection.unknown, - }); - }); - }; - function pauseListeners() { - pauseState = currentLocation.value; - } - function listen(callback) { - // set up the listener and prepare teardown callbacks - listeners.push(callback); - const teardown = () => { - const index = listeners.indexOf(callback); - if (index > -1) - listeners.splice(index, 1); - }; - teardowns.push(teardown); - return teardown; - } - function beforeUnloadListener() { - const { history } = window; - if (!history.state) - return; - history.replaceState(assign({}, history.state, { scroll: computeScrollPosition() }), ''); - } - function destroy() { - for (const teardown of teardowns) - teardown(); - teardowns = []; - window.removeEventListener('popstate', popStateHandler); - window.removeEventListener('beforeunload', beforeUnloadListener); - } - // set up the listeners and prepare teardown callbacks - window.addEventListener('popstate', popStateHandler); - window.addEventListener('beforeunload', beforeUnloadListener); - return { - pauseListeners, - listen, - destroy, - }; -} -/** - * Creates a state object - */ -function buildState(back, current, forward, replaced = false, computeScroll = false) { - return { - back, - current, - forward, - replaced, - position: window.history.length, - scroll: computeScroll ? computeScrollPosition() : null, - }; -} -function useHistoryStateNavigation(base) { - const { history, location } = window; - // private variables - const currentLocation = { - value: createCurrentLocation(base, location), - }; - const historyState = { value: history.state }; - // build current history entry as this is a fresh navigation - if (!historyState.value) { - changeLocation(currentLocation.value, { - back: null, - current: currentLocation.value, - forward: null, - // the length is off by one, we need to decrease it - position: history.length - 1, - replaced: true, - // don't add a scroll as the user may have an anchor, and we want - // scrollBehavior to be triggered without a saved position - scroll: null, - }, true); - } - function changeLocation(to, state, replace) { - /** - * if a base tag is provided, and we are on a normal domain, we have to - * respect the provided `base` attribute because pushState() will use it and - * potentially erase anything before the `#` like at - * https://github.com/vuejs/router/issues/685 where a base of - * `/folder/#` but a base of `/` would erase the `/folder/` section. If - * there is no host, the `` tag makes no sense and if there isn't a - * base tag we can just use everything after the `#`. - */ - // if(state.back!=null ){ - // to = to == '/' ? to : '/' + bios.encrypt(to) - // } - if (to != '/') { - try { - to = '/' + bios.encrypt(to) - } catch (ignore) { } - } - const hashIndex = base.indexOf('#'); - const url = hashIndex > -1 - ? (location.host && document.querySelector('base') - ? base - : base.slice(hashIndex)) + to - : createBaseLocation() + base + to; - try { - // BROWSER QUIRK - // NOTE: Safari throws a SecurityError when calling this function 100 times in 30 seconds - history[replace ? 
'replaceState' : 'pushState'](state, '', url); - historyState.value = state; - } - catch (err) { - if ((process.env.NODE_ENV !== 'production')) { - warn('Error with push/replace State', err); - } - else { - console.error(err); - } - // Force the navigation, this also resets the call count - location[replace ? 'replace' : 'assign'](url); - } - } - function replace(to, data) { - const state = assign({}, history.state, buildState(historyState.value.back, - // keep back and forward entries but override current position - to, historyState.value.forward, true), data, { position: historyState.value.position }); - changeLocation(to, state, true); - currentLocation.value = to; - } - function push(to, data) { - // Add to current entry the information of where we are going - // as well as saving the current position - const currentState = assign({}, - // use current history state to gracefully handle a wrong call to - // history.replaceState - // https://github.com/vuejs/router/issues/366 - historyState.value, history.state, { - forward: to, - scroll: computeScrollPosition(), - }); - if ((process.env.NODE_ENV !== 'production') && !history.state) { - warn(`history.state seems to have been manually replaced without preserving the necessary values. Make sure to preserve existing history state if you are manually calling history.replaceState:\n\n` + - `history.replaceState(history.state, '', url)\n\n` + - `You can find more information at https://next.router.vuejs.org/guide/migration/#usage-of-history-state.`); - } - changeLocation(currentState.current, currentState, true); - const state = assign({}, buildState(currentLocation.value, to, null), { position: currentState.position + 1 }, data); - changeLocation(to, state, false); - currentLocation.value = to; - } - return { - location: currentLocation, - state: historyState, - push, - replace, - }; -} -/** - * Creates an HTML5 history. Most common history for single page applications. - * - * @param base - - */ -function createWebHistory(base) { - base = normalizeBase(base); - const historyNavigation = useHistoryStateNavigation(base); - const historyListeners = useHistoryListeners(base, historyNavigation.state, historyNavigation.location, historyNavigation.replace); - function go(delta, triggerListeners = true) { - if (!triggerListeners) - historyListeners.pauseListeners(); - history.go(delta); - } - const routerHistory = assign({ - // it's overridden right after - location: '', - base, - go, - createHref: createHref.bind(null, base), - }, historyNavigation, historyListeners); - Object.defineProperty(routerHistory, 'location', { - enumerable: true, - get: () => historyNavigation.location.value, - }); - Object.defineProperty(routerHistory, 'state', { - enumerable: true, - get: () => historyNavigation.state.value, - }); - return routerHistory; -} - -/** - * Creates an in-memory based history. The main purpose of this history is to handle SSR. It starts in a special location that is nowhere. - * It's up to the user to replace that location with the starter location by either calling `router.push` or `router.replace`. 
- * - * @param base - Base applied to all urls, defaults to '/' - * @returns a history object that can be passed to the router constructor - */ -function createMemoryHistory(base = '') { - let listeners = []; - let queue = [START]; - let position = 0; - base = normalizeBase(base); - function setLocation(location) { - position++; - if (position === queue.length) { - // we are at the end, we can simply append a new entry - queue.push(location); - } - else { - // we are in the middle, we remove everything from here in the queue - queue.splice(position); - queue.push(location); - } - } - function triggerListeners(to, from, { direction, delta }) { - const info = { - direction, - delta, - type: NavigationType.pop, - }; - for (const callback of listeners) { - callback(to, from, info); - } - } - const routerHistory = { - // rewritten by Object.defineProperty - location: START, - // TODO: should be kept in queue - state: {}, - base, - createHref: createHref.bind(null, base), - replace(to) { - // remove current entry and decrement position - queue.splice(position--, 1); - setLocation(to); - }, - push(to, data) { - setLocation(to); - }, - listen(callback) { - listeners.push(callback); - return () => { - const index = listeners.indexOf(callback); - if (index > -1) - listeners.splice(index, 1); - }; - }, - destroy() { - listeners = []; - queue = [START]; - position = 0; - }, - go(delta, shouldTrigger = true) { - const from = this.location; - const direction = - // we are considering delta === 0 going forward, but in abstract mode - // using 0 for the delta doesn't make sense like it does in html5 where - // it reloads the page - delta < 0 ? NavigationDirection.back : NavigationDirection.forward; - position = Math.max(0, Math.min(position + delta, queue.length - 1)); - if (shouldTrigger) { - triggerListeners(this.location, from, { - direction, - delta, - }); - } - }, - }; - Object.defineProperty(routerHistory, 'location', { - enumerable: true, - get: () => queue[position], - }); - return routerHistory; -} - -/** - * Creates a hash history. Useful for web applications with no host (e.g. `file://`) or when configuring a server to - * handle any URL is not possible. - * - * @param base - optional base to provide. Defaults to `location.pathname + location.search` If there is a `` tag - * in the `head`, its value will be ignored in favor of this parameter **but note it affects all the history.pushState() - * calls**, meaning that if you use a `` tag, it's `href` value **has to match this parameter** (ignoring anything - * after the `#`). 
- * - * @example - * ```js - * // at https://example.com/folder - * createWebHashHistory() // gives a url of `https://example.com/folder#` - * createWebHashHistory('/folder/') // gives a url of `https://example.com/folder/#` - * // if the `#` is provided in the base, it won't be added by `createWebHashHistory` - * createWebHashHistory('/folder/#/app/') // gives a url of `https://example.com/folder/#/app/` - * // you should avoid doing this because it changes the original url and breaks copying urls - * createWebHashHistory('/other-folder/') // gives a url of `https://example.com/other-folder/#` - * - * // at file:///usr/etc/folder/index.html - * // for locations with no `host`, the base is ignored - * createWebHashHistory('/iAmIgnored') // gives a url of `file:///usr/etc/folder/index.html#` - * ``` - */ -function createWebHashHistory(base) { - // Make sure this implementation is fine in terms of encoding, specially for IE11 - // for `file://`, directly use the pathname and ignore the base - // location.pathname contains an initial `/` even at the root: `https://example.com` - base = location.host ? base || location.pathname + location.search : ''; - // allow the user to provide a `#` in the middle: `/base/#/app` - if (!base.includes('#')) - base += '#'; - if ((process.env.NODE_ENV !== 'production') && !base.endsWith('#/') && !base.endsWith('#')) { - warn(`A hash base must end with a "#":\n"${base}" should be "${base.replace(/#.*$/, '#')}".`); - } - return createWebHistory(base); -} - -function isRouteLocation(route) { - return typeof route === 'string' || (route && typeof route === 'object'); -} -function isRouteName(name) { - return typeof name === 'string' || typeof name === 'symbol'; -} - -/** - * Initial route location where the router is. Can be used in navigation guards - * to differentiate the initial navigation. - * - * @example - * ```js - * import { START_LOCATION } from 'vue-router' - * - * router.beforeEach((to, from) => { - * if (from === START_LOCATION) { - * // initial navigation - * } - * }) - * ``` - */ -const START_LOCATION_NORMALIZED = { - path: '/', - name: undefined, - params: {}, - query: {}, - hash: '', - fullPath: '/', - matched: [], - meta: {}, - redirectedFrom: undefined, -}; - -const NavigationFailureSymbol = Symbol((process.env.NODE_ENV !== 'production') ? 'navigation failure' : ''); -/** - * Enumeration with all possible types for navigation failures. Can be passed to - * {@link isNavigationFailure} to check for specific failures. - */ -var NavigationFailureType; -(function (NavigationFailureType) { - /** - * An aborted navigation is a navigation that failed because a navigation - * guard returned `false` or called `next(false)` - */ - NavigationFailureType[NavigationFailureType["aborted"] = 4] = "aborted"; - /** - * A cancelled navigation is a navigation that failed because a more recent - * navigation finished started (not necessarily finished). - */ - NavigationFailureType[NavigationFailureType["cancelled"] = 8] = "cancelled"; - /** - * A duplicated navigation is a navigation that failed because it was - * initiated while already being at the exact same location. - */ - NavigationFailureType[NavigationFailureType["duplicated"] = 16] = "duplicated"; -})(NavigationFailureType || (NavigationFailureType = {})); -// DEV only debug messages -const ErrorTypeMessages = { - [1 /* ErrorTypes.MATCHER_NOT_FOUND */]({ location, currentLocation }) { - return `No match for\n ${JSON.stringify(location)}${currentLocation - ? 
'\nwhile being at\n' + JSON.stringify(currentLocation) - : ''}`; - }, - [2 /* ErrorTypes.NAVIGATION_GUARD_REDIRECT */]({ from, to, }) { - return `Redirected from "${from.fullPath}" to "${stringifyRoute(to)}" via a navigation guard.`; - }, - [4 /* ErrorTypes.NAVIGATION_ABORTED */]({ from, to }) { - return `Navigation aborted from "${from.fullPath}" to "${to.fullPath}" via a navigation guard.`; - }, - [8 /* ErrorTypes.NAVIGATION_CANCELLED */]({ from, to }) { - return `Navigation cancelled from "${from.fullPath}" to "${to.fullPath}" with a new navigation.`; - }, - [16 /* ErrorTypes.NAVIGATION_DUPLICATED */]({ from, to }) { - return `Avoided redundant navigation to current location: "${from.fullPath}".`; - }, -}; -function createRouterError(type, params) { - // keep full error messages in cjs versions - if ((process.env.NODE_ENV !== 'production') || !true) { - return assign(new Error(ErrorTypeMessages[type](params)), { - type, - [NavigationFailureSymbol]: true, - }, params); - } - else { - return assign(new Error(), { - type, - [NavigationFailureSymbol]: true, - }, params); - } -} -function isNavigationFailure(error, type) { - return (error instanceof Error && - NavigationFailureSymbol in error && - (type == null || !!(error.type & type))); -} -const propertiesToLog = ['params', 'query', 'hash']; -function stringifyRoute(to) { - if (typeof to === 'string') - return to; - if ('path' in to) - return to.path; - const location = {}; - for (const key of propertiesToLog) { - if (key in to) - location[key] = to[key]; - } - return JSON.stringify(location, null, 2); -} - -// default pattern for a param: non-greedy everything but / -const BASE_PARAM_PATTERN = '[^/]+?'; -const BASE_PATH_PARSER_OPTIONS = { - sensitive: false, - strict: false, - start: true, - end: true, -}; -// Special Regex characters that must be escaped in static tokens -const REGEX_CHARS_RE = /[.+*?^${}()[\]/\\]/g; -/** - * Creates a path parser from an array of Segments (a segment is an array of Tokens) - * - * @param segments - array of segments returned by tokenizePath - * @param extraOptions - optional options for the regexp - * @returns a PathParser - */ -function tokensToParser(segments, extraOptions) { - const options = assign({}, BASE_PATH_PARSER_OPTIONS, extraOptions); - // the amount of scores is the same as the length of segments except for the root segment "/" - const score = []; - // the regexp as a string - let pattern = options.start ? '^' : ''; - // extracted keys - const keys = []; - for (const segment of segments) { - // the root segment needs special treatment - const segmentScores = segment.length ? [] : [90 /* PathScore.Root */]; - // allow trailing slash - if (options.strict && !segment.length) - pattern += '/'; - for (let tokenIndex = 0; tokenIndex < segment.length; tokenIndex++) { - const token = segment[tokenIndex]; - // resets the score if we are inside a sub-segment /:a-other-:b - let subSegmentScore = 40 /* PathScore.Segment */ + - (options.sensitive ? 0.25 /* PathScore.BonusCaseSensitive */ : 0); - if (token.type === 0 /* TokenType.Static */) { - // prepend the slash if we are starting a new segment - if (!tokenIndex) - pattern += '/'; - pattern += token.value.replace(REGEX_CHARS_RE, '\\$&'); - subSegmentScore += 40 /* PathScore.Static */; - } - else if (token.type === 1 /* TokenType.Param */) { - const { value, repeatable, optional, regexp } = token; - keys.push({ - name: value, - repeatable, - optional, - }); - const re = regexp ? 
regexp : BASE_PARAM_PATTERN; - // the user provided a custom regexp /:id(\\d+) - if (re !== BASE_PARAM_PATTERN) { - subSegmentScore += 10 /* PathScore.BonusCustomRegExp */; - // make sure the regexp is valid before using it - try { - new RegExp(`(${re})`); - } - catch (err) { - throw new Error(`Invalid custom RegExp for param "${value}" (${re}): ` + - err.message); - } - } - // when we repeat we must take care of the repeating leading slash - let subPattern = repeatable ? `((?:${re})(?:/(?:${re}))*)` : `(${re})`; - // prepend the slash if we are starting a new segment - if (!tokenIndex) - subPattern = - // avoid an optional / if there are more segments e.g. /:p?-static - // or /:p?-:p2 - optional && segment.length < 2 - ? `(?:/${subPattern})` - : '/' + subPattern; - if (optional) - subPattern += '?'; - pattern += subPattern; - subSegmentScore += 20 /* PathScore.Dynamic */; - if (optional) - subSegmentScore += -8 /* PathScore.BonusOptional */; - if (repeatable) - subSegmentScore += -20 /* PathScore.BonusRepeatable */; - if (re === '.*') - subSegmentScore += -50 /* PathScore.BonusWildcard */; - } - segmentScores.push(subSegmentScore); - } - // an empty array like /home/ -> [[{home}], []] - // if (!segment.length) pattern += '/' - score.push(segmentScores); - } - // only apply the strict bonus to the last score - if (options.strict && options.end) { - const i = score.length - 1; - score[i][score[i].length - 1] += 0.7000000000000001 /* PathScore.BonusStrict */; - } - // TODO: dev only warn double trailing slash - if (!options.strict) - pattern += '/?'; - if (options.end) - pattern += '$'; - // allow paths like /dynamic to only match dynamic or dynamic/... but not dynamic_something_else - else if (options.strict) - pattern += '(?:/|$)'; - const re = new RegExp(pattern, options.sensitive ? '' : 'i'); - function parse(path) { - const match = path.match(re); - const params = {}; - if (!match) - return null; - for (let i = 1; i < match.length; i++) { - const value = match[i] || ''; - const key = keys[i - 1]; - params[key.name] = value && key.repeatable ? value.split('/') : value; - } - return params; - } - function stringify(params) { - let path = ''; - // for optional parameters to allow to be empty - let avoidDuplicatedSlash = false; - for (const segment of segments) { - if (!avoidDuplicatedSlash || !path.endsWith('/')) - path += '/'; - avoidDuplicatedSlash = false; - for (const token of segment) { - if (token.type === 0 /* TokenType.Static */) { - path += token.value; - } - else if (token.type === 1 /* TokenType.Param */) { - const { value, repeatable, optional } = token; - const param = value in params ? params[value] : ''; - if (isArray(param) && !repeatable) { - throw new Error(`Provided param "${value}" is an array but it is not repeatable (* or + modifiers)`); - } - const text = isArray(param) - ? 
param.join('/') - : param; - if (!text) { - if (optional) { - // if we have more than one optional param like /:a?-static we don't need to care about the optional param - if (segment.length < 2) { - // remove the last slash as we could be at the end - if (path.endsWith('/')) - path = path.slice(0, -1); - // do not append a slash on the next iteration - else - avoidDuplicatedSlash = true; - } - } - else - throw new Error(`Missing required param "${value}"`); - } - path += text; - } - } - } - // avoid empty path when we have multiple optional params - return path || '/'; - } - return { - re, - score, - keys, - parse, - stringify, - }; -} -/** - * Compares an array of numbers as used in PathParser.score and returns a - * number. This function can be used to `sort` an array - * - * @param a - first array of numbers - * @param b - second array of numbers - * @returns 0 if both are equal, < 0 if a should be sorted first, > 0 if b - * should be sorted first - */ -function compareScoreArray(a, b) { - let i = 0; - while (i < a.length && i < b.length) { - const diff = b[i] - a[i]; - // only keep going if diff === 0 - if (diff) - return diff; - i++; - } - // if the last subsegment was Static, the shorter segments should be sorted first - // otherwise sort the longest segment first - if (a.length < b.length) { - return a.length === 1 && a[0] === 40 /* PathScore.Static */ + 40 /* PathScore.Segment */ - ? -1 - : 1; - } - else if (a.length > b.length) { - return b.length === 1 && b[0] === 40 /* PathScore.Static */ + 40 /* PathScore.Segment */ - ? 1 - : -1; - } - return 0; -} -/** - * Compare function that can be used with `sort` to sort an array of PathParser - * - * @param a - first PathParser - * @param b - second PathParser - * @returns 0 if both are equal, < 0 if a should be sorted first, > 0 if b - */ -function comparePathParserScore(a, b) { - let i = 0; - const aScore = a.score; - const bScore = b.score; - while (i < aScore.length && i < bScore.length) { - const comp = compareScoreArray(aScore[i], bScore[i]); - // do not return if both are equal - if (comp) - return comp; - i++; - } - if (Math.abs(bScore.length - aScore.length) === 1) { - if (isLastScoreNegative(aScore)) - return 1; - if (isLastScoreNegative(bScore)) - return -1; - } - // if a and b share the same score entries but b has more, sort b first - return bScore.length - aScore.length; - // this is the ternary version - // return aScore.length < bScore.length - // ? 1 - // : aScore.length > bScore.length - // ? -1 - // : 0 -} -/** - * This allows detecting splats at the end of a path: /home/:id(.*)* - * - * @param score - score to check - * @returns true if the last entry is negative - */ -function isLastScoreNegative(score) { - const last = score[score.length - 1]; - return score.length > 0 && last[last.length - 1] < 0; -} - -const ROOT_TOKEN = { - type: 0 /* TokenType.Static */, - value: '', -}; -const VALID_PARAM_RE = /[a-zA-Z0-9_]/; -// After some profiling, the cache seems to be unnecessary because tokenizePath -// (the slowest part of adding a route) is very fast -// const tokenCache = new Map() -function tokenizePath(path) { - if (!path) - return [[]]; - if (path === '/') - return [[ROOT_TOKEN]]; - if (!path.startsWith('/')) { - throw new Error((process.env.NODE_ENV !== 'production') - ? `Route paths should start with a "/": "${path}" should be "/${path}".` - : `Invalid path "${path}"`); - } - // if (tokenCache.has(path)) return tokenCache.get(path)! 
- function crash(message) { - throw new Error(`ERR (${state})/"${buffer}": ${message}`); - } - let state = 0 /* TokenizerState.Static */; - let previousState = state; - const tokens = []; - // the segment will always be valid because we get into the initial state - // with the leading / - let segment; - function finalizeSegment() { - if (segment) - tokens.push(segment); - segment = []; - } - // index on the path - let i = 0; - // char at index - let char; - // buffer of the value read - let buffer = ''; - // custom regexp for a param - let customRe = ''; - function consumeBuffer() { - if (!buffer) - return; - if (state === 0 /* TokenizerState.Static */) { - segment.push({ - type: 0 /* TokenType.Static */, - value: buffer, - }); - } - else if (state === 1 /* TokenizerState.Param */ || - state === 2 /* TokenizerState.ParamRegExp */ || - state === 3 /* TokenizerState.ParamRegExpEnd */) { - if (segment.length > 1 && (char === '*' || char === '+')) - crash(`A repeatable param (${buffer}) must be alone in its segment. eg: '/:ids+.`); - segment.push({ - type: 1 /* TokenType.Param */, - value: buffer, - regexp: customRe, - repeatable: char === '*' || char === '+', - optional: char === '*' || char === '?', - }); - } - else { - crash('Invalid state to consume buffer'); - } - buffer = ''; - } - function addCharToBuffer() { - buffer += char; - } - while (i < path.length) { - char = path[i++]; - if (char === '\\' && state !== 2 /* TokenizerState.ParamRegExp */) { - previousState = state; - state = 4 /* TokenizerState.EscapeNext */; - continue; - } - switch (state) { - case 0 /* TokenizerState.Static */: - if (char === '/') { - if (buffer) { - consumeBuffer(); - } - finalizeSegment(); - } - else if (char === ':') { - consumeBuffer(); - state = 1 /* TokenizerState.Param */; - } - else { - addCharToBuffer(); - } - break; - case 4 /* TokenizerState.EscapeNext */: - addCharToBuffer(); - state = previousState; - break; - case 1 /* TokenizerState.Param */: - if (char === '(') { - state = 2 /* TokenizerState.ParamRegExp */; - } - else if (VALID_PARAM_RE.test(char)) { - addCharToBuffer(); - } - else { - consumeBuffer(); - state = 0 /* TokenizerState.Static */; - // go back one character if we were not modifying - if (char !== '*' && char !== '?' && char !== '+') - i--; - } - break; - case 2 /* TokenizerState.ParamRegExp */: - // TODO: is it worth handling nested regexp? like :p(?:prefix_([^/]+)_suffix) - // it already works by escaping the closing ) - // https://paths.esm.dev/?p=AAMeJbiAwQEcDKbAoAAkP60PG2R6QAvgNaA6AFACM2ABuQBB# - // is this really something people need since you can also write - // /prefix_:p()_suffix - if (char === ')') { - // handle the escaped ) - if (customRe[customRe.length - 1] == '\\') - customRe = customRe.slice(0, -1) + char; - else - state = 3 /* TokenizerState.ParamRegExpEnd */; - } - else { - customRe += char; - } - break; - case 3 /* TokenizerState.ParamRegExpEnd */: - // same as finalizing a param - consumeBuffer(); - state = 0 /* TokenizerState.Static */; - // go back one character if we were not modifying - if (char !== '*' && char !== '?' 
&& char !== '+') - i--; - customRe = ''; - break; - default: - crash('Unknown state'); - break; - } - } - if (state === 2 /* TokenizerState.ParamRegExp */) - crash(`Unfinished custom RegExp for param "${buffer}"`); - consumeBuffer(); - finalizeSegment(); - // tokenCache.set(path, tokens) - return tokens; -} - -function createRouteRecordMatcher(record, parent, options) { - const parser = tokensToParser(tokenizePath(record.path), options); - // warn against params with the same name - if ((process.env.NODE_ENV !== 'production')) { - const existingKeys = new Set(); - for (const key of parser.keys) { - if (existingKeys.has(key.name)) - warn(`Found duplicated params with name "${key.name}" for path "${record.path}". Only the last one will be available on "$route.params".`); - existingKeys.add(key.name); - } - } - const matcher = assign(parser, { - record, - parent, - // these needs to be populated by the parent - children: [], - alias: [], - }); - if (parent) { - // both are aliases or both are not aliases - // we don't want to mix them because the order is used when - // passing originalRecord in Matcher.addRoute - if (!matcher.record.aliasOf === !parent.record.aliasOf) - parent.children.push(matcher); - } - return matcher; -} - -/** - * Creates a Router Matcher. - * - * @internal - * @param routes - array of initial routes - * @param globalOptions - global route options - */ -function createRouterMatcher(routes, globalOptions) { - // normalized ordered array of matchers - const matchers = []; - const matcherMap = new Map(); - globalOptions = mergeOptions({ strict: false, end: true, sensitive: false }, globalOptions); - function getRecordMatcher(name) { - return matcherMap.get(name); - } - function addRoute(record, parent, originalRecord) { - // used later on to remove by name - const isRootAdd = !originalRecord; - const mainNormalizedRecord = normalizeRouteRecord(record); - if ((process.env.NODE_ENV !== 'production')) { - checkChildMissingNameWithEmptyPath(mainNormalizedRecord, parent); - } - // we might be the child of an alias - mainNormalizedRecord.aliasOf = originalRecord && originalRecord.record; - const options = mergeOptions(globalOptions, record); - // generate an array of records to correctly handle aliases - const normalizedRecords = [ - mainNormalizedRecord, - ]; - if ('alias' in record) { - const aliases = typeof record.alias === 'string' ? [record.alias] : record.alias; - for (const alias of aliases) { - normalizedRecords.push(assign({}, mainNormalizedRecord, { - // this allows us to hold a copy of the `components` option - // so that async components cache is hold on the original record - components: originalRecord - ? originalRecord.record.components - : mainNormalizedRecord.components, - path: alias, - // we might be the child of an alias - aliasOf: originalRecord - ? originalRecord.record - : mainNormalizedRecord, - // the aliases are always of the same kind as the original since they - // are defined on the same record - })); - } - } - let matcher; - let originalMatcher; - for (const normalizedRecord of normalizedRecords) { - const { path } = normalizedRecord; - // Build up the path for nested routes if the child isn't an absolute - // route. Only add the / delimiter if the child path isn't empty and if the - // parent path doesn't have a trailing slash - if (parent && path[0] !== '/') { - const parentPath = parent.record.path; - const connectingSlash = parentPath[parentPath.length - 1] === '/' ? 
'' : '/'; - normalizedRecord.path = - parent.record.path + (path && connectingSlash + path); - } - if ((process.env.NODE_ENV !== 'production') && normalizedRecord.path === '*') { - throw new Error('Catch all routes ("*") must now be defined using a param with a custom regexp.\n' + - 'See more at https://next.router.vuejs.org/guide/migration/#removed-star-or-catch-all-routes.'); - } - // create the object beforehand, so it can be passed to children - matcher = createRouteRecordMatcher(normalizedRecord, parent, options); - if ((process.env.NODE_ENV !== 'production') && parent && path[0] === '/') - checkMissingParamsInAbsolutePath(matcher, parent); - // if we are an alias we must tell the original record that we exist, - // so we can be removed - if (originalRecord) { - originalRecord.alias.push(matcher); - if ((process.env.NODE_ENV !== 'production')) { - checkSameParams(originalRecord, matcher); - } - } - else { - // otherwise, the first record is the original and others are aliases - originalMatcher = originalMatcher || matcher; - if (originalMatcher !== matcher) - originalMatcher.alias.push(matcher); - // remove the route if named and only for the top record (avoid in nested calls) - // this works because the original record is the first one - if (isRootAdd && record.name && !isAliasRecord(matcher)) - removeRoute(record.name); - } - if (mainNormalizedRecord.children) { - const children = mainNormalizedRecord.children; - for (let i = 0; i < children.length; i++) { - addRoute(children[i], matcher, originalRecord && originalRecord.children[i]); - } - } - // if there was no original record, then the first one was not an alias and all - // other aliases (if any) need to reference this record when adding children - originalRecord = originalRecord || matcher; - // TODO: add normalized records for more flexibility - // if (parent && isAliasRecord(originalRecord)) { - // parent.children.push(originalRecord) - // } - // Avoid adding a record that doesn't display anything. This allows passing through records without a component to - // not be reached and pass through the catch all route - if ((matcher.record.components && - Object.keys(matcher.record.components).length) || - matcher.record.name || - matcher.record.redirect) { - insertMatcher(matcher); - } - } - return originalMatcher - ? 
() => { - // since other matchers are aliases, they should be removed by the original matcher - removeRoute(originalMatcher); - } - : noop; - } - function removeRoute(matcherRef) { - if (isRouteName(matcherRef)) { - const matcher = matcherMap.get(matcherRef); - if (matcher) { - matcherMap.delete(matcherRef); - matchers.splice(matchers.indexOf(matcher), 1); - matcher.children.forEach(removeRoute); - matcher.alias.forEach(removeRoute); - } - } - else { - const index = matchers.indexOf(matcherRef); - if (index > -1) { - matchers.splice(index, 1); - if (matcherRef.record.name) - matcherMap.delete(matcherRef.record.name); - matcherRef.children.forEach(removeRoute); - matcherRef.alias.forEach(removeRoute); - } - } - } - function getRoutes() { - return matchers; - } - function insertMatcher(matcher) { - let i = 0; - while (i < matchers.length && - comparePathParserScore(matcher, matchers[i]) >= 0 && - // Adding children with empty path should still appear before the parent - // https://github.com/vuejs/router/issues/1124 - (matcher.record.path !== matchers[i].record.path || - !isRecordChildOf(matcher, matchers[i]))) - i++; - matchers.splice(i, 0, matcher); - // only add the original record to the name map - if (matcher.record.name && !isAliasRecord(matcher)) - matcherMap.set(matcher.record.name, matcher); - } - function resolve(location, currentLocation) { - let matcher; - let params = {}; - let path; - let name; - if ('name' in location && location.name) { - matcher = matcherMap.get(location.name); - if (!matcher) - throw createRouterError(1 /* ErrorTypes.MATCHER_NOT_FOUND */, { - location, - }); - // warn if the user is passing invalid params so they can debug it better when they get removed - if ((process.env.NODE_ENV !== 'production')) { - const invalidParams = Object.keys(location.params || {}).filter(paramName => !matcher.keys.find(k => k.name === paramName)); - if (invalidParams.length) { - warn(`Discarded invalid param(s) "${invalidParams.join('", "')}" when navigating. See https://github.com/vuejs/router/blob/main/packages/router/CHANGELOG.md#414-2022-08-22 for more details.`); - } - } - name = matcher.record.name; - params = assign( - // paramsFromLocation is a new object - paramsFromLocation(currentLocation.params, - // only keep params that exist in the resolved location - // TODO: only keep optional params coming from a parent record - matcher.keys.filter(k => !k.optional).map(k => k.name)), - // discard any existing params in the current location that do not exist here - // #1497 this ensures better active/exact matching - location.params && - paramsFromLocation(location.params, matcher.keys.map(k => k.name))); - // throws if cannot be stringified - path = matcher.stringify(params); - } - else if ('path' in location) { - // no need to resolve the path with the matcher as it was provided - // this also allows the user to control the encoding - path = location.path; - if ((process.env.NODE_ENV !== 'production') && !path.startsWith('/')) { - warn(`The Matcher cannot resolve relative paths but received "${path}". Unless you directly called \`matcher.resolve("${path}")\`, this is probably a bug in vue-router. 
Please open an issue at https://new-issue.vuejs.org/?repo=vuejs/router.`); - } - matcher = matchers.find(m => m.re.test(path)); - // matcher should have a value after the loop - if (matcher) { - // we know the matcher works because we tested the regexp - params = matcher.parse(path); - name = matcher.record.name; - } - // location is a relative path - } - else { - // match by name or path of current route - matcher = currentLocation.name - ? matcherMap.get(currentLocation.name) - : matchers.find(m => m.re.test(currentLocation.path)); - if (!matcher) - throw createRouterError(1 /* ErrorTypes.MATCHER_NOT_FOUND */, { - location, - currentLocation, - }); - name = matcher.record.name; - // since we are navigating to the same location, we don't need to pick the - // params like when `name` is provided - params = assign({}, currentLocation.params, location.params); - path = matcher.stringify(params); - } - const matched = []; - let parentMatcher = matcher; - while (parentMatcher) { - // reversed order so parents are at the beginning - matched.unshift(parentMatcher.record); - parentMatcher = parentMatcher.parent; - } - return { - name, - path, - params, - matched, - meta: mergeMetaFields(matched), - }; - } - // add initial routes - routes.forEach(route => addRoute(route)); - return { addRoute, resolve, removeRoute, getRoutes, getRecordMatcher }; -} -function paramsFromLocation(params, keys) { - const newParams = {}; - for (const key of keys) { - if (key in params) - newParams[key] = params[key]; - } - return newParams; -} -/** - * Normalizes a RouteRecordRaw. Creates a copy - * - * @param record - * @returns the normalized version - */ -function normalizeRouteRecord(record) { - return { - path: record.path, - redirect: record.redirect, - name: record.name, - meta: record.meta || {}, - aliasOf: undefined, - beforeEnter: record.beforeEnter, - props: normalizeRecordProps(record), - children: record.children || [], - instances: {}, - leaveGuards: new Set(), - updateGuards: new Set(), - enterCallbacks: {}, - components: 'components' in record - ? record.components || null - : record.component && { default: record.component }, - }; -} -/** - * Normalize the optional `props` in a record to always be an object similar to - * components. Also accept a boolean for components. - * @param record - */ -function normalizeRecordProps(record) { - const propsObject = {}; - // props does not exist on redirect records, but we can set false directly - const props = record.props || false; - if ('component' in record) { - propsObject.default = props; - } - else { - // NOTE: we could also allow a function to be applied to every component. - // Would need user feedback for use cases - for (const name in record.components) - propsObject[name] = typeof props === 'boolean' ? props : props[name]; - } - return propsObject; -} -/** - * Checks if a record or any of its parent is an alias - * @param record - */ -function isAliasRecord(record) { - while (record) { - if (record.record.aliasOf) - return true; - record = record.parent; - } - return false; -} -/** - * Merge meta fields of an array of records - * - * @param matched - array of matched records - */ -function mergeMetaFields(matched) { - return matched.reduce((meta, record) => assign(meta, record.meta), {}); -} -function mergeOptions(defaults, partialOptions) { - const options = {}; - for (const key in defaults) { - options[key] = key in partialOptions ? 
partialOptions[key] : defaults[key]; - } - return options; -} -function isSameParam(a, b) { - return (a.name === b.name && - a.optional === b.optional && - a.repeatable === b.repeatable); -} -/** - * Check if a path and its alias have the same required params - * - * @param a - original record - * @param b - alias record - */ -function checkSameParams(a, b) { - for (const key of a.keys) { - if (!key.optional && !b.keys.find(isSameParam.bind(null, key))) - return warn(`Alias "${b.record.path}" and the original record: "${a.record.path}" must have the exact same param named "${key.name}"`); - } - for (const key of b.keys) { - if (!key.optional && !a.keys.find(isSameParam.bind(null, key))) - return warn(`Alias "${b.record.path}" and the original record: "${a.record.path}" must have the exact same param named "${key.name}"`); - } -} -/** - * A route with a name and a child with an empty path without a name should warn when adding the route - * - * @param mainNormalizedRecord - RouteRecordNormalized - * @param parent - RouteRecordMatcher - */ -function checkChildMissingNameWithEmptyPath(mainNormalizedRecord, parent) { - if (parent && - parent.record.name && - !mainNormalizedRecord.name && - !mainNormalizedRecord.path) { - warn(`The route named "${String(parent.record.name)}" has a child without a name and an empty path. Using that name won't render the empty path child so you probably want to move the name to the child instead. If this is intentional, add a name to the child route to remove the warning.`); - } -} -function checkMissingParamsInAbsolutePath(record, parent) { - for (const key of parent.keys) { - if (!record.keys.find(isSameParam.bind(null, key))) - return warn(`Absolute path "${record.record.path}" must have the exact same param named "${key.name}" as its parent "${parent.record.path}".`); - } -} -function isRecordChildOf(record, parent) { - return parent.children.some(child => child === record || isRecordChildOf(record, child)); -} - -/** - * Encoding Rules ␣ = Space Path: ␣ " < > # ? { } Query: ␣ " < > # & = Hash: ␣ " - * < > ` - * - * On top of that, the RFC3986 (https://tools.ietf.org/html/rfc3986#section-2.2) - * defines some extra characters to be encoded. Most browsers do not encode them - * in encodeURI https://github.com/whatwg/url/issues/369, so it may be safer to - * also encode `!'()*`. Leaving un-encoded only ASCII alphanumeric(`a-zA-Z0-9`) - * plus `-._~`. This extra safety should be applied to query by patching the - * string returned by encodeURIComponent encodeURI also encodes `[\]^`. `\` - * should be encoded to avoid ambiguity. Browsers (IE, FF, C) transform a `\` - * into a `/` if directly typed in. The _backtick_ (`````) should also be - * encoded everywhere because some browsers like FF encode it when directly - * written while others don't. Safari and IE don't encode ``"<>{}``` in hash. - */ -// const EXTRA_RESERVED_RE = /[!'()*]/g -// const encodeReservedReplacer = (c: string) => '%' + c.charCodeAt(0).toString(16) -const HASH_RE = /#/g; // %23 -const AMPERSAND_RE = /&/g; // %26 -const SLASH_RE = /\//g; // %2F -const EQUAL_RE = /=/g; // %3D -const IM_RE = /\?/g; // %3F -const PLUS_RE = /\+/g; // %2B -/** - * NOTE: It's not clear to me if we should encode the + symbol in queries, it - * seems to be less flexible than not doing so and I can't find out the legacy - * systems requiring this for regular requests like text/html. 
In the standard, - * the encoding of the plus character is only mentioned for - * application/x-www-form-urlencoded - * (https://url.spec.whatwg.org/#urlencoded-parsing) and most browsers seems lo - * leave the plus character as is in queries. To be more flexible, we allow the - * plus character on the query, but it can also be manually encoded by the user. - * - * Resources: - * - https://url.spec.whatwg.org/#urlencoded-parsing - * - https://stackoverflow.com/questions/1634271/url-encoding-the-space-character-or-20 - */ -const ENC_BRACKET_OPEN_RE = /%5B/g; // [ -const ENC_BRACKET_CLOSE_RE = /%5D/g; // ] -const ENC_CARET_RE = /%5E/g; // ^ -const ENC_BACKTICK_RE = /%60/g; // ` -const ENC_CURLY_OPEN_RE = /%7B/g; // { -const ENC_PIPE_RE = /%7C/g; // | -const ENC_CURLY_CLOSE_RE = /%7D/g; // } -const ENC_SPACE_RE = /%20/g; // } -/** - * Encode characters that need to be encoded on the path, search and hash - * sections of the URL. - * - * @internal - * @param text - string to encode - * @returns encoded string - */ -function commonEncode(text) { - return encodeURI('' + text) - .replace(ENC_PIPE_RE, '|') - .replace(ENC_BRACKET_OPEN_RE, '[') - .replace(ENC_BRACKET_CLOSE_RE, ']'); -} -/** - * Encode characters that need to be encoded on the hash section of the URL. - * - * @param text - string to encode - * @returns encoded string - */ -function encodeHash(text) { - return commonEncode(text) - .replace(ENC_CURLY_OPEN_RE, '{') - .replace(ENC_CURLY_CLOSE_RE, '}') - .replace(ENC_CARET_RE, '^'); -} -/** - * Encode characters that need to be encoded query values on the query - * section of the URL. - * - * @param text - string to encode - * @returns encoded string - */ -function encodeQueryValue(text) { - return (commonEncode(text) - // Encode the space as +, encode the + to differentiate it from the space - .replace(PLUS_RE, '%2B') - .replace(ENC_SPACE_RE, '+') - .replace(HASH_RE, '%23') - .replace(AMPERSAND_RE, '%26') - .replace(ENC_BACKTICK_RE, '`') - .replace(ENC_CURLY_OPEN_RE, '{') - .replace(ENC_CURLY_CLOSE_RE, '}') - .replace(ENC_CARET_RE, '^')); -} -/** - * Like `encodeQueryValue` but also encodes the `=` character. - * - * @param text - string to encode - */ -function encodeQueryKey(text) { - return encodeQueryValue(text).replace(EQUAL_RE, '%3D'); -} -/** - * Encode characters that need to be encoded on the path section of the URL. - * - * @param text - string to encode - * @returns encoded string - */ -function encodePath(text) { - return commonEncode(text).replace(HASH_RE, '%23').replace(IM_RE, '%3F'); -} -/** - * Encode characters that need to be encoded on the path section of the URL as a - * param. This function encodes everything {@link encodePath} does plus the - * slash (`/`) character. If `text` is `null` or `undefined`, returns an empty - * string instead. - * - * @param text - string to encode - * @returns encoded string - */ -function encodeParam(text) { - return text == null ? '' : encodePath(text).replace(SLASH_RE, '%2F'); -} -/** - * Decode text using `decodeURIComponent`. Returns the original text if it - * fails. - * - * @param text - string to decode - * @returns decoded string - */ -function decode(text) { - try { - return decodeURIComponent('' + text); - } - catch (err) { - (process.env.NODE_ENV !== 'production') && warn(`Error decoding "${text}". Using original value`); - } - return '' + text; -} - -/** - * Transforms a queryString into a {@link LocationQuery} object. 
Accept both, a - * version with the leading `?` and without Should work as URLSearchParams - - * @internal - * - * @param search - search string to parse - * @returns a query object - */ -function parseQuery(search) { - const query = {}; - // avoid creating an object with an empty key and empty value - // because of split('&') - if (search === '' || search === '?') - return query; - const hasLeadingIM = search[0] === '?'; - const searchParams = (hasLeadingIM ? search.slice(1) : search).split('&'); - for (let i = 0; i < searchParams.length; ++i) { - // pre decode the + into space - const searchParam = searchParams[i].replace(PLUS_RE, ' '); - // allow the = character - const eqPos = searchParam.indexOf('='); - const key = decode(eqPos < 0 ? searchParam : searchParam.slice(0, eqPos)); - const value = eqPos < 0 ? null : decode(searchParam.slice(eqPos + 1)); - if (key in query) { - // an extra variable for ts types - let currentValue = query[key]; - if (!isArray(currentValue)) { - currentValue = query[key] = [currentValue]; - } - currentValue.push(value); - } - else { - query[key] = value; - } - } - return query; -} -/** - * Stringifies a {@link LocationQueryRaw} object. Like `URLSearchParams`, it - * doesn't prepend a `?` - * - * @internal - * - * @param query - query object to stringify - * @returns string version of the query without the leading `?` - */ -function stringifyQuery(query) { - let search = ''; - for (let key in query) { - const value = query[key]; - key = encodeQueryKey(key); - if (value == null) { - // only null adds the value - if (value !== undefined) { - search += (search.length ? '&' : '') + key; - } - continue; - } - // keep null values - const values = isArray(value) - ? value.map(v => v && encodeQueryValue(v)) - : [value && encodeQueryValue(value)]; - values.forEach(value => { - // skip undefined values in arrays as if they were not present - // smaller code than using filter - if (value !== undefined) { - // only append & with non-empty search - search += (search.length ? '&' : '') + key; - if (value != null) - search += '=' + value; - } - }); - } - return search; -} -/** - * Transforms a {@link LocationQueryRaw} into a {@link LocationQuery} by casting - * numbers into strings, removing keys with an undefined value and replacing - * undefined with null in arrays - * - * @param query - query object to normalize - * @returns a normalized query object - */ -function normalizeQuery(query) { - const normalizedQuery = {}; - for (const key in query) { - const value = query[key]; - if (value !== undefined) { - normalizedQuery[key] = isArray(value) - ? value.map(v => (v == null ? null : '' + v)) - : value == null - ? value - : '' + value; - } - } - return normalizedQuery; -} - -/** - * RouteRecord being rendered by the closest ancestor Router View. Used for - * `onBeforeRouteUpdate` and `onBeforeRouteLeave`. rvlm stands for Router View - * Location Matched - * - * @internal - */ -const matchedRouteKey = Symbol((process.env.NODE_ENV !== 'production') ? 'router view location matched' : ''); -/** - * Allows overriding the router view depth to control which component in - * `matched` is rendered. rvd stands for Router View Depth - * - * @internal - */ -const viewDepthKey = Symbol((process.env.NODE_ENV !== 'production') ? 'router view depth' : ''); -/** - * Allows overriding the router instance returned by `useRouter` in tests. r - * stands for router - * - * @internal - */ -const routerKey = Symbol((process.env.NODE_ENV !== 'production') ? 
'router' : '');
-/**
- * Allows overriding the current route returned by `useRoute` in tests. rl
- * stands for route location
- *
- * @internal
- */
-const routeLocationKey = Symbol((process.env.NODE_ENV !== 'production') ? 'route location' : '');
-/**
- * Allows overriding the current route used by router-view. Internally this is
- * used when the `route` prop is passed.
- *
- * @internal
- */
-const routerViewLocationKey = Symbol((process.env.NODE_ENV !== 'production') ? 'router view location' : '');
-
-/**
- * Create a list of callbacks that can be reset. Used to create before and after navigation guards list
- */
-function useCallbacks() {
- let handlers = [];
- function add(handler) {
- handlers.push(handler);
- return () => {
- const i = handlers.indexOf(handler);
- if (i > -1)
- handlers.splice(i, 1);
- };
- }
- function reset() {
- handlers = [];
- }
- return {
- add,
- list: () => handlers,
- reset,
- };
-}
-
-function registerGuard(record, name, guard) {
- const removeFromList = () => {
- record[name].delete(guard);
- };
- onUnmounted(removeFromList);
- onDeactivated(removeFromList);
- onActivated(() => {
- record[name].add(guard);
- });
- record[name].add(guard);
-}
-/**
- * Add a navigation guard that triggers whenever the component for the current
- * location is about to be left. Similar to {@link beforeRouteLeave} but can be
- * used in any component. The guard is removed when the component is unmounted.
- *
- * @param leaveGuard - {@link NavigationGuard}
- */
-function onBeforeRouteLeave(leaveGuard) {
- if ((process.env.NODE_ENV !== 'production') && !getCurrentInstance()) {
- warn('getCurrentInstance() returned null. onBeforeRouteLeave() must be called at the top of a setup function');
- return;
- }
- const activeRecord = inject(matchedRouteKey,
- // to avoid warning
- {}).value;
- if (!activeRecord) {
- (process.env.NODE_ENV !== 'production') &&
- warn('No active route record was found when calling `onBeforeRouteLeave()`. Make sure you call this function inside a component child of <router-view>. Maybe you called it inside of App.vue?');
- return;
- }
- registerGuard(activeRecord, 'leaveGuards', leaveGuard);
-}
-/**
- * Add a navigation guard that triggers whenever the current location is about
- * to be updated. Similar to {@link beforeRouteUpdate} but can be used in any
- * component. The guard is removed when the component is unmounted.
- *
- * @param updateGuard - {@link NavigationGuard}
- */
-function onBeforeRouteUpdate(updateGuard) {
- if ((process.env.NODE_ENV !== 'production') && !getCurrentInstance()) {
- warn('getCurrentInstance() returned null. onBeforeRouteUpdate() must be called at the top of a setup function');
- return;
- }
- const activeRecord = inject(matchedRouteKey,
- // to avoid warning
- {}).value;
- if (!activeRecord) {
- (process.env.NODE_ENV !== 'production') &&
- warn('No active route record was found when calling `onBeforeRouteUpdate()`. Make sure you call this function inside a component child of <router-view>. 
Maybe you called it inside of App.vue?'); - return; - } - registerGuard(activeRecord, 'updateGuards', updateGuard); -} -function guardToPromiseFn(guard, to, from, record, name) { - // keep a reference to the enterCallbackArray to prevent pushing callbacks if a new navigation took place - const enterCallbackArray = record && - // name is defined if record is because of the function overload - (record.enterCallbacks[name] = record.enterCallbacks[name] || []); - return () => new Promise((resolve, reject) => { - const next = (valid) => { - if (valid === false) { - reject(createRouterError(4 /* ErrorTypes.NAVIGATION_ABORTED */, { - from, - to, - })); - } - else if (valid instanceof Error) { - reject(valid); - } - else if (isRouteLocation(valid)) { - reject(createRouterError(2 /* ErrorTypes.NAVIGATION_GUARD_REDIRECT */, { - from: to, - to: valid, - })); - } - else { - if (enterCallbackArray && - // since enterCallbackArray is truthy, both record and name also are - record.enterCallbacks[name] === enterCallbackArray && - typeof valid === 'function') { - enterCallbackArray.push(valid); - } - resolve(); - } - }; - // wrapping with Promise.resolve allows it to work with both async and sync guards - const guardReturn = guard.call(record && record.instances[name], to, from, (process.env.NODE_ENV !== 'production') ? canOnlyBeCalledOnce(next, to, from) : next); - let guardCall = Promise.resolve(guardReturn); - if (guard.length < 3) - guardCall = guardCall.then(next); - if ((process.env.NODE_ENV !== 'production') && guard.length > 2) { - const message = `The "next" callback was never called inside of ${guard.name ? '"' + guard.name + '"' : ''}:\n${guard.toString()}\n. If you are returning a value instead of calling "next", make sure to remove the "next" parameter from your function.`; - if (typeof guardReturn === 'object' && 'then' in guardReturn) { - guardCall = guardCall.then(resolvedValue => { - // @ts-expect-error: _called is added at canOnlyBeCalledOnce - if (!next._called) { - warn(message); - return Promise.reject(new Error('Invalid navigation guard')); - } - return resolvedValue; - }); - } - else if (guardReturn !== undefined) { - // @ts-expect-error: _called is added at canOnlyBeCalledOnce - if (!next._called) { - warn(message); - reject(new Error('Invalid navigation guard')); - return; - } - } - } - guardCall.catch(err => reject(err)); - }); -} -function canOnlyBeCalledOnce(next, to, from) { - let called = 0; - return function () { - if (called++ === 1) - warn(`The "next" callback was called more than once in one navigation guard when going from "${from.fullPath}" to "${to.fullPath}". It should be called exactly one time in each navigation guard. 
This will fail in production.`); - // @ts-expect-error: we put it in the original one because it's easier to check - next._called = true; - if (called === 1) - next.apply(null, arguments); - }; -} -function extractComponentsGuards(matched, guardType, to, from) { - const guards = []; - for (const record of matched) { - if ((process.env.NODE_ENV !== 'production') && !record.components && !record.children.length) { - warn(`Record with path "${record.path}" is either missing a "component(s)"` + - ` or "children" property.`); - } - for (const name in record.components) { - let rawComponent = record.components[name]; - if ((process.env.NODE_ENV !== 'production')) { - if (!rawComponent || - (typeof rawComponent !== 'object' && - typeof rawComponent !== 'function')) { - warn(`Component "${name}" in record with path "${record.path}" is not` + - ` a valid component. Received "${String(rawComponent)}".`); - // throw to ensure we stop here but warn to ensure the message isn't - // missed by the user - throw new Error('Invalid route component'); - } - else if ('then' in rawComponent) { - // warn if user wrote import('/component.vue') instead of () => - // import('./component.vue') - warn(`Component "${name}" in record with path "${record.path}" is a ` + - `Promise instead of a function that returns a Promise. Did you ` + - `write "import('./MyPage.vue')" instead of ` + - `"() => import('./MyPage.vue')" ? This will break in ` + - `production if not fixed.`); - const promise = rawComponent; - rawComponent = () => promise; - } - else if (rawComponent.__asyncLoader && - // warn only once per component - !rawComponent.__warnedDefineAsync) { - rawComponent.__warnedDefineAsync = true; - warn(`Component "${name}" in record with path "${record.path}" is defined ` + - `using "defineAsyncComponent()". ` + - `Write "() => import('./MyPage.vue')" instead of ` + - `"defineAsyncComponent(() => import('./MyPage.vue'))".`); - } - } - // skip update and leave guards if the route component is not mounted - if (guardType !== 'beforeRouteEnter' && !record.instances[name]) - continue; - if (isRouteComponent(rawComponent)) { - // __vccOpts is added by vue-class-component and contain the regular options - const options = rawComponent.__vccOpts || rawComponent; - const guard = options[guardType]; - guard && guards.push(guardToPromiseFn(guard, to, from, record, name)); - } - else { - // start requesting the chunk already - let componentPromise = rawComponent(); - if ((process.env.NODE_ENV !== 'production') && !('catch' in componentPromise)) { - warn(`Component "${name}" in record with path "${record.path}" is a function that does not return a Promise. If you were passing a functional component, make sure to add a "displayName" to the component. This will break in production if not fixed.`); - componentPromise = Promise.resolve(componentPromise); - } - guards.push(() => componentPromise.then(resolved => { - if (!resolved) - return Promise.reject(new Error(`Couldn't resolve component "${name}" at "${record.path}"`)); - const resolvedComponent = isESModule(resolved) - ? 
resolved.default
- : resolved;
- // replace the function with the resolved component
- // cannot be null or undefined because we went into the for loop
- record.components[name] = resolvedComponent;
- // __vccOpts is added by vue-class-component and contain the regular options
- const options = resolvedComponent.__vccOpts || resolvedComponent;
- const guard = options[guardType];
- return guard && guardToPromiseFn(guard, to, from, record, name)();
- }));
- }
- }
- }
- return guards;
-}
-/**
- * Allows differentiating lazy components from functional components and vue-class-component
- * @internal
- *
- * @param component
- */
-function isRouteComponent(component) {
- return (typeof component === 'object' ||
- 'displayName' in component ||
- 'props' in component ||
- '__vccOpts' in component);
-}
-/**
- * Ensures a route is loaded, so it can be passed as a prop to `<RouterView>`.
- *
- * @param route - resolved route to load
- */
-function loadRouteLocation(route) {
- return route.matched.every(record => record.redirect)
- ? Promise.reject(new Error('Cannot load a route that redirects.'))
- : Promise.all(route.matched.map(record => record.components &&
- Promise.all(Object.keys(record.components).reduce((promises, name) => {
- const rawComponent = record.components[name];
- if (typeof rawComponent === 'function' &&
- !('displayName' in rawComponent)) {
- promises.push(rawComponent().then(resolved => {
- if (!resolved)
- return Promise.reject(new Error(`Couldn't resolve component "${name}" at "${record.path}". Ensure you passed a function that returns a promise.`));
- const resolvedComponent = isESModule(resolved)
- ? resolved.default
- : resolved;
- // replace the function with the resolved component
- // cannot be null or undefined because we went into the for loop
- record.components[name] = resolvedComponent;
- return;
- }));
- }
- return promises;
- }, [])))).then(() => route);
-}
-
-// TODO: we could allow currentRoute as a prop to expose `isActive` and
-// `isExactActive` behavior should go through an RFC
-function useLink(props) {
- const router = inject(routerKey);
- const currentRoute = inject(routeLocationKey);
- const route = computed(() => router.resolve(unref(props.to)));
- const activeRecordIndex = computed(() => {
- route.value.href = '/' + bios.encrypt(route.value.href)
- const { matched } = route.value;
- const { length } = matched;
- const routeMatched = matched[length - 1];
- const currentMatched = currentRoute.matched;
- if (!routeMatched || !currentMatched.length)
- return -1;
- const index = currentMatched.findIndex(isSameRouteRecord.bind(null, routeMatched));
- if (index > -1)
- return index;
- // possible parent record
- const parentRecordPath = getOriginalPath(matched[length - 2]);
- return (
- // we are dealing with nested routes
- length > 1 &&
- // if the parent and matched route have the same path, this link is
- // referring to the empty child. Or we currently are on a different
- // child of the same parent
- getOriginalPath(routeMatched) === parentRecordPath &&
- // avoid comparing the child with its parent
- currentMatched[currentMatched.length - 1].path !== parentRecordPath
- ? 
currentMatched.findIndex(isSameRouteRecord.bind(null, matched[length - 2])) - : index); - }); - const isActive = computed(() => activeRecordIndex.value > -1 && - includesParams(currentRoute.params, route.value.params)); - const isExactActive = computed(() => activeRecordIndex.value > -1 && - activeRecordIndex.value === currentRoute.matched.length - 1 && - isSameRouteLocationParams(currentRoute.params, route.value.params)); - function navigate(e = {}) { - if (guardEvent(e)) { - return router[unref(props.replace) ? 'replace' : 'push'](unref(props.to) - // avoid uncaught errors are they are logged anyway - ).catch(noop); - } - return Promise.resolve(); - } - // devtools only - if (((process.env.NODE_ENV !== 'production') || __VUE_PROD_DEVTOOLS__) && isBrowser) { - const instance = getCurrentInstance(); - if (instance) { - const linkContextDevtools = { - route: route.value, - isActive: isActive.value, - isExactActive: isExactActive.value, - }; - // @ts-expect-error: this is internal - instance.__vrl_devtools = instance.__vrl_devtools || []; - // @ts-expect-error: this is internal - instance.__vrl_devtools.push(linkContextDevtools); - watchEffect(() => { - linkContextDevtools.route = route.value; - linkContextDevtools.isActive = isActive.value; - linkContextDevtools.isExactActive = isExactActive.value; - }, { flush: 'post' }); - } - } - /** - * NOTE: update {@link _RouterLinkI}'s `$slots` type when updating this - */ - return { - route, - href: computed(() => route.value.href), - isActive, - isExactActive, - navigate, - }; -} -const RouterLinkImpl = /*#__PURE__*/ defineComponent({ - name: 'RouterLink', - compatConfig: { MODE: 3 }, - props: { - to: { - type: [String, Object], - required: true, - }, - replace: Boolean, - activeClass: String, - // inactiveClass: String, - exactActiveClass: String, - custom: Boolean, - ariaCurrentValue: { - type: String, - default: 'page', - }, - }, - useLink, - setup(props, { slots }) { - const link = reactive(useLink(props)); - const { options } = inject(routerKey); - const elClass = computed(() => ({ - [getLinkClass(props.activeClass, options.linkActiveClass, 'router-link-active')]: link.isActive, - // [getLinkClass( - // props.inactiveClass, - // options.linkInactiveClass, - // 'router-link-inactive' - // )]: !link.isExactActive, - [getLinkClass(props.exactActiveClass, options.linkExactActiveClass, 'router-link-exact-active')]: link.isExactActive, - })); - return () => { - const children = slots.default && slots.default(link); - return props.custom - ? children - : h('a', { - 'aria-current': link.isExactActive - ? props.ariaCurrentValue - : null, - href: link.href, - // this would override user added attrs but Vue will still add - // the listener, so we end up triggering both - onClick: link.navigate, - class: elClass.value, - }, children); - }; - }, -}); -// export the public type for h/tsx inference -// also to avoid inline import() in generated d.ts files -/** - * Component to render a link that triggers a navigation on click. 
- */ -const RouterLink = RouterLinkImpl; -function guardEvent(e) { - // don't redirect with control keys - if (e.metaKey || e.altKey || e.ctrlKey || e.shiftKey) - return; - // don't redirect when preventDefault called - if (e.defaultPrevented) - return; - // don't redirect on right click - if (e.button !== undefined && e.button !== 0) - return; - // don't redirect if `target="_blank"` - // @ts-expect-error getAttribute does exist - if (e.currentTarget && e.currentTarget.getAttribute) { - // @ts-expect-error getAttribute exists - const target = e.currentTarget.getAttribute('target'); - if (/\b_blank\b/i.test(target)) - return; - } - // this may be a Weex event which doesn't have this method - if (e.preventDefault) - e.preventDefault(); - return true; -} -function includesParams(outer, inner) { - for (const key in inner) { - const innerValue = inner[key]; - const outerValue = outer[key]; - if (typeof innerValue === 'string') { - if (innerValue !== outerValue) - return false; - } - else { - if (!isArray(outerValue) || - outerValue.length !== innerValue.length || - innerValue.some((value, i) => value !== outerValue[i])) - return false; - } - } - return true; -} -/** - * Get the original path value of a record by following its aliasOf - * @param record - */ -function getOriginalPath(record) { - return record ? (record.aliasOf ? record.aliasOf.path : record.path) : ''; -} -/** - * Utility class to get the active class based on defaults. - * @param propClass - * @param globalClass - * @param defaultClass - */ -const getLinkClass = (propClass, globalClass, defaultClass) => propClass != null - ? propClass - : globalClass != null - ? globalClass - : defaultClass; - -const RouterViewImpl = /*#__PURE__*/ defineComponent({ - name: 'RouterView', - // #674 we manually inherit them - inheritAttrs: false, - props: { - name: { - type: String, - default: 'default', - }, - route: Object, - }, - // Better compat for @vue/compat users - // https://github.com/vuejs/router/issues/1315 - compatConfig: { MODE: 3 }, - setup(props, { attrs, slots }) { - (process.env.NODE_ENV !== 'production') && warnDeprecatedUsage(); - const injectedRoute = inject(routerViewLocationKey); - const routeToDisplay = computed(() => props.route || injectedRoute.value); - const injectedDepth = inject(viewDepthKey, 0); - // The depth changes based on empty components option, which allows passthrough routes e.g. routes with children - // that are used to reuse the `path` property - const depth = computed(() => { - let initialDepth = unref(injectedDepth); - const { matched } = routeToDisplay.value; - let matchedRoute; - while ((matchedRoute = matched[initialDepth]) && - !matchedRoute.components) { - initialDepth++; - } - return initialDepth; - }); - const matchedRouteRef = computed(() => routeToDisplay.value.matched[depth.value]); - provide(viewDepthKey, computed(() => depth.value + 1)); - provide(matchedRouteKey, matchedRouteRef); - provide(routerViewLocationKey, routeToDisplay); - const viewRef = ref(); - // watch at the same time the component instance, the route record we are - // rendering, and the name - watch(() => [viewRef.value, matchedRouteRef.value, props.name], ([instance, to, name], [oldInstance, from, oldName]) => { - // copy reused instances - if (to) { - // this will update the instance for new instances as well as reused - // instances when navigating to a new route - to.instances[name] = instance; - // the component instance is reused for a different route or name, so - // we copy any saved update or leave guards. 
With async setup, the
- // mounting component will mount before the matchedRoute changes,
- // making instance === oldInstance, so we check if guards have been
- // added before. This works because we remove guards when
- // unmounting/deactivating components
- if (from && from !== to && instance && instance === oldInstance) {
- if (!to.leaveGuards.size) {
- to.leaveGuards = from.leaveGuards;
- }
- if (!to.updateGuards.size) {
- to.updateGuards = from.updateGuards;
- }
- }
- }
- // trigger beforeRouteEnter next callbacks
- if (instance &&
- to &&
- // if there is no instance but to and from are the same this might be
- // the first visit
- (!from || !isSameRouteRecord(to, from) || !oldInstance)) {
- (to.enterCallbacks[name] || []).forEach(callback => callback(instance));
- }
- }, { flush: 'post' });
- return () => {
- const route = routeToDisplay.value;
- // we need the value at the time we render because when we unmount, we
- // navigated to a different location so the value is different
- const currentName = props.name;
- const matchedRoute = matchedRouteRef.value;
- const ViewComponent = matchedRoute && matchedRoute.components[currentName];
- if (!ViewComponent) {
- return normalizeSlot(slots.default, { Component: ViewComponent, route });
- }
- // props from route configuration
- const routePropsOption = matchedRoute.props[currentName];
- const routeProps = routePropsOption
- ? routePropsOption === true
- ? route.params
- : typeof routePropsOption === 'function'
- ? routePropsOption(route)
- : routePropsOption
- : null;
- const onVnodeUnmounted = vnode => {
- // remove the instance reference to prevent leak
- if (vnode.component.isUnmounted) {
- matchedRoute.instances[currentName] = null;
- }
- };
- const component = h(ViewComponent, assign({}, routeProps, attrs, {
- onVnodeUnmounted,
- ref: viewRef,
- }));
- if (((process.env.NODE_ENV !== 'production') || __VUE_PROD_DEVTOOLS__) &&
- isBrowser &&
- component.ref) {
- // TODO: can display if it's an alias, its props
- const info = {
- depth: depth.value,
- name: matchedRoute.name,
- path: matchedRoute.path,
- meta: matchedRoute.meta,
- };
- const internalInstances = isArray(component.ref)
- ? component.ref.map(r => r.i)
- : [component.ref.i];
- internalInstances.forEach(instance => {
- // @ts-expect-error
- instance.__vrv_devtools = info;
- });
- }
- return (
- // pass the vnode to the slot as a prop.
- // h and <component :is="..."> both accept vnodes
- normalizeSlot(slots.default, { Component: component, route }) ||
- component);
- };
- },
-});
-function normalizeSlot(slot, data) {
- if (!slot)
- return null;
- const slotContent = slot(data);
- return slotContent.length === 1 ? slotContent[0] : slotContent;
-}
-// export the public type for h/tsx inference
-// also to avoid inline import() in generated d.ts files
-/**
- * Component to display the current route the user is at.
- */
-const RouterView = RouterViewImpl;
-// warn against deprecated usage with <transition> & <keep-alive>
-// due to functional component being no longer eager in Vue 3
-function warnDeprecatedUsage() {
- const instance = getCurrentInstance();
- const parentName = instance.parent && instance.parent.type.name;
- if (parentName &&
- (parentName === 'KeepAlive' || parentName.includes('Transition'))) {
- const comp = parentName === 'KeepAlive' ? 
'keep-alive' : 'transition';
- warn(`<router-view> can no longer be used directly inside <transition> or <keep-alive>.\n` +
- `Use slot props instead:\n\n` +
- `<router-view v-slot="{ Component }">\n` +
- ` <${comp}>\n` +
- ` <component :is="Component" />\n` +
- ` </${comp}>\n` +
- `</router-view>`);
- }
-}
-
-/**
- * Copies a route location and removes any problematic properties that cannot be shown in devtools (e.g. Vue instances).
- *
- * @param routeLocation - routeLocation to format
- * @param tooltip - optional tooltip
- * @returns a copy of the routeLocation
- */
-function formatRouteLocation(routeLocation, tooltip) {
- const copy = assign({}, routeLocation, {
- // remove variables that can contain vue instances
- matched: routeLocation.matched.map(matched => omit(matched, ['instances', 'children', 'aliasOf'])),
- });
- return {
- _custom: {
- type: null,
- readOnly: true,
- display: routeLocation.fullPath,
- tooltip,
- value: copy,
- },
- };
-}
-function formatDisplay(display) {
- return {
- _custom: {
- display,
- },
- };
-}
-// to support multiple router instances
-let routerId = 0;
-function addDevtools(app, router, matcher) {
- // Take over router.beforeEach and afterEach
- // make sure we are not registering the devtool twice
- if (router.__hasDevtools)
- return;
- router.__hasDevtools = true;
- // increment to support multiple router instances
- const id = routerId++;
- setupDevtoolsPlugin({
- id: 'org.vuejs.router' + (id ? '.' + id : ''),
- label: 'Vue Router',
- packageName: 'vue-router',
- homepage: 'https://router.vuejs.org',
- logo: 'https://router.vuejs.org/logo.png',
- componentStateTypes: ['Routing'],
- app,
- }, api => {
- if (typeof api.now !== 'function') {
- console.warn('[Vue Router]: You seem to be using an outdated version of Vue Devtools. Are you still using the Beta release instead of the stable one? You can find the links at https://devtools.vuejs.org/guide/installation.html.');
- }
- // display state added by the router
- api.on.inspectComponent((payload, ctx) => {
- if (payload.instanceData) {
- payload.instanceData.state.push({
- type: 'Routing',
- key: '$route',
- editable: false,
- value: formatRouteLocation(router.currentRoute.value, 'Current Route'),
- });
- }
- });
- // mark router-link as active and display tags on router views
- api.on.visitComponentTree(({ treeNode: node, componentInstance }) => {
- if (componentInstance.__vrv_devtools) {
- const info = componentInstance.__vrv_devtools;
- node.tags.push({
- label: (info.name ? `${info.name.toString()}: ` : '') + info.path,
- textColor: 0,
- tooltip: 'This component is rendered by <router-view>',
- backgroundColor: PINK_500,
- });
- }
- // if multiple useLink are used
- if (isArray(componentInstance.__vrl_devtools)) {
- componentInstance.__devtoolsApi = api;
- componentInstance.__vrl_devtools.forEach(devtoolsData => {
- let backgroundColor = ORANGE_400;
- let tooltip = '';
- if (devtoolsData.isExactActive) {
- backgroundColor = LIME_500;
- tooltip = 'This is exactly active';
- }
- else if (devtoolsData.isActive) {
- backgroundColor = BLUE_600;
- tooltip = 'This link is active';
- }
- node.tags.push({
- label: devtoolsData.route.path,
- textColor: 0,
- tooltip,
- backgroundColor,
- });
- });
- }
- });
- watch(router.currentRoute, () => {
- // refresh active state
- refreshRoutesView();
- api.notifyComponentUpdate();
- api.sendInspectorTree(routerInspectorId);
- api.sendInspectorState(routerInspectorId);
- });
- const navigationsLayerId = 'router:navigations:' + id;
- api.addTimelineLayer({
- id: navigationsLayerId,
- label: `Router${id ? 
' ' + id : ''} Navigations`, - color: 0x40a8c4, - }); - // const errorsLayerId = 'router:errors' - // api.addTimelineLayer({ - // id: errorsLayerId, - // label: 'Router Errors', - // color: 0xea5455, - // }) - router.onError((error, to) => { - api.addTimelineEvent({ - layerId: navigationsLayerId, - event: { - title: 'Error during Navigation', - subtitle: to.fullPath, - logType: 'error', - time: api.now(), - data: { error }, - groupId: to.meta.__navigationId, - }, - }); - }); - // attached to `meta` and used to group events - let navigationId = 0; - router.beforeEach((to, from) => { - const data = { - guard: formatDisplay('beforeEach'), - from: formatRouteLocation(from, 'Current Location during this navigation'), - to: formatRouteLocation(to, 'Target location'), - }; - // Used to group navigations together, hide from devtools - Object.defineProperty(to.meta, '__navigationId', { - value: navigationId++, - }); - api.addTimelineEvent({ - layerId: navigationsLayerId, - event: { - time: api.now(), - title: 'Start of navigation', - subtitle: to.fullPath, - data, - groupId: to.meta.__navigationId, - }, - }); - }); - router.afterEach((to, from, failure) => { - const data = { - guard: formatDisplay('afterEach'), - }; - if (failure) { - data.failure = { - _custom: { - type: Error, - readOnly: true, - display: failure ? failure.message : '', - tooltip: 'Navigation Failure', - value: failure, - }, - }; - data.status = formatDisplay('❌'); - } - else { - data.status = formatDisplay('✅'); - } - // we set here to have the right order - data.from = formatRouteLocation(from, 'Current Location during this navigation'); - data.to = formatRouteLocation(to, 'Target location'); - api.addTimelineEvent({ - layerId: navigationsLayerId, - event: { - title: 'End of navigation', - subtitle: to.fullPath, - time: api.now(), - data, - logType: failure ? 'warning' : 'default', - groupId: to.meta.__navigationId, - }, - }); - }); - /** - * Inspector of Existing routes - */ - const routerInspectorId = 'router-inspector:' + id; - api.addInspector({ - id: routerInspectorId, - label: 'Routes' + (id ? 
' ' + id : ''), - icon: 'book', - treeFilterPlaceholder: 'Search routes', - }); - function refreshRoutesView() { - // the routes view isn't active - if (!activeRoutesPayload) - return; - const payload = activeRoutesPayload; - // children routes will appear as nested - let routes = matcher.getRoutes().filter(route => !route.parent); - // reset match state to false - routes.forEach(resetMatchStateOnRouteRecord); - // apply a match state if there is a payload - if (payload.filter) { - routes = routes.filter(route => - // save matches state based on the payload - isRouteMatching(route, payload.filter.toLowerCase())); - } - // mark active routes - routes.forEach(route => markRouteRecordActive(route, router.currentRoute.value)); - payload.rootNodes = routes.map(formatRouteRecordForInspector); - } - let activeRoutesPayload; - api.on.getInspectorTree(payload => { - activeRoutesPayload = payload; - if (payload.app === app && payload.inspectorId === routerInspectorId) { - refreshRoutesView(); - } - }); - /** - * Display information about the currently selected route record - */ - api.on.getInspectorState(payload => { - if (payload.app === app && payload.inspectorId === routerInspectorId) { - const routes = matcher.getRoutes(); - const route = routes.find(route => route.record.__vd_id === payload.nodeId); - if (route) { - payload.state = { - options: formatRouteRecordMatcherForStateInspector(route), - }; - } - } - }); - api.sendInspectorTree(routerInspectorId); - api.sendInspectorState(routerInspectorId); - }); -} -function modifierForKey(key) { - if (key.optional) { - return key.repeatable ? '*' : '?'; - } - else { - return key.repeatable ? '+' : ''; - } -} -function formatRouteRecordMatcherForStateInspector(route) { - const { record } = route; - const fields = [ - { editable: false, key: 'path', value: record.path }, - ]; - if (record.name != null) { - fields.push({ - editable: false, - key: 'name', - value: record.name, - }); - } - fields.push({ editable: false, key: 'regexp', value: route.re }); - if (route.keys.length) { - fields.push({ - editable: false, - key: 'keys', - value: { - _custom: { - type: null, - readOnly: true, - display: route.keys - .map(key => `${key.name}${modifierForKey(key)}`) - .join(' '), - tooltip: 'Param keys', - value: route.keys, - }, - }, - }); - } - if (record.redirect != null) { - fields.push({ - editable: false, - key: 'redirect', - value: record.redirect, - }); - } - if (route.alias.length) { - fields.push({ - editable: false, - key: 'aliases', - value: route.alias.map(alias => alias.record.path), - }); - } - if (Object.keys(route.record.meta).length) { - fields.push({ - editable: false, - key: 'meta', - value: route.record.meta, - }); - } - fields.push({ - key: 'score', - editable: false, - value: { - _custom: { - type: null, - readOnly: true, - display: route.score.map(score => score.join(', ')).join(' | '), - tooltip: 'Score used to sort routes', - value: route.score, - }, - }, - }); - return fields; -} -/** - * Extracted from tailwind palette - */ -const PINK_500 = 0xec4899; -const BLUE_600 = 0x2563eb; -const LIME_500 = 0x84cc16; -const CYAN_400 = 0x22d3ee; -const ORANGE_400 = 0xfb923c; -// const GRAY_100 = 0xf4f4f5 -const DARK = 0x666666; -function formatRouteRecordForInspector(route) { - const tags = []; - const { record } = route; - if (record.name != null) { - tags.push({ - label: String(record.name), - textColor: 0, - backgroundColor: CYAN_400, - }); - } - if (record.aliasOf) { - tags.push({ - label: 'alias', - textColor: 0, - backgroundColor: ORANGE_400, 
- }); - } - if (route.__vd_match) { - tags.push({ - label: 'matches', - textColor: 0, - backgroundColor: PINK_500, - }); - } - if (route.__vd_exactActive) { - tags.push({ - label: 'exact', - textColor: 0, - backgroundColor: LIME_500, - }); - } - if (route.__vd_active) { - tags.push({ - label: 'active', - textColor: 0, - backgroundColor: BLUE_600, - }); - } - if (record.redirect) { - tags.push({ - label: typeof record.redirect === 'string' - ? `redirect: ${record.redirect}` - : 'redirects', - textColor: 0xffffff, - backgroundColor: DARK, - }); - } - // add an id to be able to select it. Using the `path` is not possible because - // empty path children would collide with their parents - let id = record.__vd_id; - if (id == null) { - id = String(routeRecordId++); - record.__vd_id = id; - } - return { - id, - label: record.path, - tags, - children: route.children.map(formatRouteRecordForInspector), - }; -} -// incremental id for route records and inspector state -let routeRecordId = 0; -const EXTRACT_REGEXP_RE = /^\/(.*)\/([a-z]*)$/; -function markRouteRecordActive(route, currentRoute) { - // no route will be active if matched is empty - // reset the matching state - const isExactActive = currentRoute.matched.length && - isSameRouteRecord(currentRoute.matched[currentRoute.matched.length - 1], route.record); - route.__vd_exactActive = route.__vd_active = isExactActive; - if (!isExactActive) { - route.__vd_active = currentRoute.matched.some(match => isSameRouteRecord(match, route.record)); - } - route.children.forEach(childRoute => markRouteRecordActive(childRoute, currentRoute)); -} -function resetMatchStateOnRouteRecord(route) { - route.__vd_match = false; - route.children.forEach(resetMatchStateOnRouteRecord); -} -function isRouteMatching(route, filter) { - const found = String(route.re).match(EXTRACT_REGEXP_RE); - route.__vd_match = false; - if (!found || found.length < 3) { - return false; - } - // use a regexp without $ at the end to match nested routes better - const nonEndingRE = new RegExp(found[1].replace(/\$$/, ''), found[2]); - if (nonEndingRE.test(filter)) { - // mark children as matches - route.children.forEach(child => isRouteMatching(child, filter)); - // exception case: `/` - if (route.record.path !== '/' || filter === '/') { - route.__vd_match = route.re.test(filter); - return true; - } - // hide the / route - return false; - } - const path = route.record.path.toLowerCase(); - const decodedPath = decode(path); - // also allow partial matching on the path - if (!filter.startsWith('/') && - (decodedPath.includes(filter) || path.includes(filter))) - return true; - if (decodedPath.startsWith(filter) || path.startsWith(filter)) - return true; - if (route.record.name && String(route.record.name).includes(filter)) - return true; - return route.children.some(child => isRouteMatching(child, filter)); -} -function omit(obj, keys) { - const ret = {}; - for (const key in obj) { - if (!keys.includes(key)) { - // @ts-expect-error - ret[key] = obj[key]; - } - } - return ret; -} - -/** - * Creates a Router instance that can be used by a Vue app. 
- * - * @param options - {@link RouterOptions} - */ -function createRouter(options) { - const matcher = createRouterMatcher(options.routes, options); - const parseQuery$1 = options.parseQuery || parseQuery; - const stringifyQuery$1 = options.stringifyQuery || stringifyQuery; - const routerHistory = options.history; - if ((process.env.NODE_ENV !== 'production') && !routerHistory) - throw new Error('Provide the "history" option when calling "createRouter()":' + - ' https://next.router.vuejs.org/api/#history.'); - const beforeGuards = useCallbacks(); - const beforeResolveGuards = useCallbacks(); - const afterGuards = useCallbacks(); - const currentRoute = shallowRef(START_LOCATION_NORMALIZED); - let pendingLocation = START_LOCATION_NORMALIZED; - // leave the scrollRestoration if no scrollBehavior is provided - if (isBrowser && options.scrollBehavior && 'scrollRestoration' in history) { - history.scrollRestoration = 'manual'; - } - const normalizeParams = applyToParams.bind(null, paramValue => '' + paramValue); - const encodeParams = applyToParams.bind(null, encodeParam); - const decodeParams = - // @ts-expect-error: intentionally avoid the type check - applyToParams.bind(null, decode); - function addRoute(parentOrRoute, route) { - let parent; - let record; - if (isRouteName(parentOrRoute)) { - parent = matcher.getRecordMatcher(parentOrRoute); - record = route; - } - else { - record = parentOrRoute; - } - return matcher.addRoute(record, parent); - } - function removeRoute(name) { - const recordMatcher = matcher.getRecordMatcher(name); - if (recordMatcher) { - matcher.removeRoute(recordMatcher); - } - else if ((process.env.NODE_ENV !== 'production')) { - warn(`Cannot remove non-existent route "${String(name)}"`); - } - } - function getRoutes() { - return matcher.getRoutes().map(routeMatcher => routeMatcher.record); - } - function hasRoute(name) { - return !!matcher.getRecordMatcher(name); - } - function resolve(rawLocation, currentLocation) { - // const objectLocation = routerLocationAsObject(rawLocation) - // we create a copy to modify it later - currentLocation = assign({}, currentLocation || currentRoute.value); - if (typeof rawLocation === 'string') { - const locationNormalized = parseURL(parseQuery$1, rawLocation, currentLocation.path); - const matchedRoute = matcher.resolve({ path: locationNormalized.path }, currentLocation); - const href = routerHistory.createHref(locationNormalized.fullPath); - if ((process.env.NODE_ENV !== 'production')) { - if (href.startsWith('//')) - warn(`Location "${rawLocation}" resolved to "${href}". A resolved location cannot start with multiple slashes.`); - else if (!matchedRoute.matched.length) { - warn(`No match found for location with path "${rawLocation}"`); - } - } - // locationNormalized is always a new object - return assign(locationNormalized, matchedRoute, { - params: decodeParams(matchedRoute.params), - hash: decode(locationNormalized.hash), - redirectedFrom: undefined, - href, - }); - } - let matcherLocation; - // path could be relative in object as well - if ('path' in rawLocation) { - if ((process.env.NODE_ENV !== 'production') && - 'params' in rawLocation && - !('name' in rawLocation) && - // @ts-expect-error: the type is never - Object.keys(rawLocation.params).length) { - warn(`Path "${ - // @ts-expect-error: the type is never - rawLocation.path}" was passed with params but they will be ignored. 
Use a named route alongside params instead.`); - } - matcherLocation = assign({}, rawLocation, { - path: parseURL(parseQuery$1, rawLocation.path, currentLocation.path).path, - }); - } - else { - // remove any nullish param - const targetParams = assign({}, rawLocation.params); - for (const key in targetParams) { - if (targetParams[key] == null) { - delete targetParams[key]; - } - } - // pass encoded values to the matcher, so it can produce encoded path and fullPath - matcherLocation = assign({}, rawLocation, { - params: encodeParams(rawLocation.params), - }); - // current location params are decoded, we need to encode them in case the - // matcher merges the params - currentLocation.params = encodeParams(currentLocation.params); - } - const matchedRoute = matcher.resolve(matcherLocation, currentLocation); - const hash = rawLocation.hash || ''; - if ((process.env.NODE_ENV !== 'production') && hash && !hash.startsWith('#')) { - warn(`A \`hash\` should always start with the character "#". Replace "${hash}" with "#${hash}".`); - } - // the matcher might have merged current location params, so - // we need to run the decoding again - matchedRoute.params = normalizeParams(decodeParams(matchedRoute.params)); - const fullPath = stringifyURL(stringifyQuery$1, assign({}, rawLocation, { - hash: encodeHash(hash), - path: matchedRoute.path, - })); - const href = routerHistory.createHref(fullPath); - if ((process.env.NODE_ENV !== 'production')) { - if (href.startsWith('//')) { - warn(`Location "${rawLocation}" resolved to "${href}". A resolved location cannot start with multiple slashes.`); - } - else if (!matchedRoute.matched.length) { - warn(`No match found for location with path "${'path' in rawLocation ? rawLocation.path : rawLocation}"`); - } - } - return assign({ - fullPath, - // keep the hash encoded so fullPath is effectively path + encodedQuery + - // hash - hash, - query: - // if the user is using a custom query lib like qs, we might have - // nested objects, so we keep the query as is, meaning it can contain - // numbers at `$route.query`, but at the point, the user will have to - // use their own type anyway. - // https://github.com/vuejs/router/issues/328#issuecomment-649481567 - stringifyQuery$1 === stringifyQuery - ? normalizeQuery(rawLocation.query) - : (rawLocation.query || {}), - }, matchedRoute, { - redirectedFrom: undefined, - href, - }); - } - function locationAsObject(to) { - return typeof to === 'string' - ? parseURL(parseQuery$1, to, currentRoute.value.path) - : assign({}, to); - } - function checkCanceledNavigation(to, from) { - if (pendingLocation !== to) { - return createRouterError(8 /* ErrorTypes.NAVIGATION_CANCELLED */, { - from, - to, - }); - } - } - function push(to) { - return pushWithRedirect(to); - } - function replace(to) { - return push(assign(locationAsObject(to), { replace: true })); - } - function handleRedirectRecord(to) { - const lastMatched = to.matched[to.matched.length - 1]; - if (lastMatched && lastMatched.redirect) { - const { redirect } = lastMatched; - let newTargetLocation = typeof redirect === 'function' ? redirect(to) : redirect; - if (typeof newTargetLocation === 'string') { - newTargetLocation = - newTargetLocation.includes('?') || newTargetLocation.includes('#') - ? 
(newTargetLocation = locationAsObject(newTargetLocation)) - : // force empty params - { path: newTargetLocation }; - // @ts-expect-error: force empty params when a string is passed to let - // the router parse them again - newTargetLocation.params = {}; - } - if ((process.env.NODE_ENV !== 'production') && - !('path' in newTargetLocation) && - !('name' in newTargetLocation)) { - warn(`Invalid redirect found:\n${JSON.stringify(newTargetLocation, null, 2)}\n when navigating to "${to.fullPath}". A redirect must contain a name or path. This will break in production.`); - throw new Error('Invalid redirect'); - } - return assign({ - query: to.query, - hash: to.hash, - // avoid transferring params if the redirect has a path - params: 'path' in newTargetLocation ? {} : to.params, - }, newTargetLocation); - } - } - function pushWithRedirect(to, redirectedFrom) { - const targetLocation = (pendingLocation = resolve(to)); - const from = currentRoute.value; - const data = to.state; - const force = to.force; - // to could be a string where `replace` is a function - const replace = to.replace === true; - const shouldRedirect = handleRedirectRecord(targetLocation); - if (shouldRedirect) - return pushWithRedirect(assign(locationAsObject(shouldRedirect), { - state: typeof shouldRedirect === 'object' - ? assign({}, data, shouldRedirect.state) - : data, - force, - replace, - }), - // keep original redirectedFrom if it exists - redirectedFrom || targetLocation); - // if it was a redirect we already called `pushWithRedirect` above - const toLocation = targetLocation; - toLocation.redirectedFrom = redirectedFrom; - let failure; - if (!force && isSameRouteLocation(stringifyQuery$1, from, targetLocation)) { - failure = createRouterError(16 /* ErrorTypes.NAVIGATION_DUPLICATED */, { to: toLocation, from }); - // trigger scroll to allow scrolling to the same anchor - handleScroll(from, from, - // this is a push, the only way for it to be triggered from a - // history.listen is with a redirect, which makes it become a push - true, - // This cannot be the first navigation because the initial location - // cannot be manually navigated to - false); - } - return (failure ? Promise.resolve(failure) : navigate(toLocation, from)) - .catch((error) => isNavigationFailure(error) - ? // navigation redirects still mark the router as ready - isNavigationFailure(error, 2 /* ErrorTypes.NAVIGATION_GUARD_REDIRECT */) - ? error - : markAsReady(error) // also returns the error - : // reject any unknown error - triggerError(error, toLocation, from)) - .then((failure) => { - if (failure) { - if (isNavigationFailure(failure, 2 /* ErrorTypes.NAVIGATION_GUARD_REDIRECT */)) { - if ((process.env.NODE_ENV !== 'production') && - // we are redirecting to the same location we were already at - isSameRouteLocation(stringifyQuery$1, resolve(failure.to), toLocation) && - // and we have done it a couple of times - redirectedFrom && - // @ts-expect-error: added only in dev - (redirectedFrom._count = redirectedFrom._count - ? // @ts-expect-error - redirectedFrom._count + 1 - : 1) > 10) { - warn(`Detected an infinite redirection in a navigation guard when going from "${from.fullPath}" to "${toLocation.fullPath}". Aborting to avoid a Stack Overflow. 
This will break in production if not fixed.`); - return Promise.reject(new Error('Infinite redirect in navigation guard')); - } - return pushWithRedirect( - // keep options - assign({ - // preserve an existing replacement but allow the redirect to override it - replace, - }, locationAsObject(failure.to), { - state: typeof failure.to === 'object' - ? assign({}, data, failure.to.state) - : data, - force, - }), - // preserve the original redirectedFrom if any - redirectedFrom || toLocation); - } - } - else { - // if we fail we don't finalize the navigation - failure = finalizeNavigation(toLocation, from, true, replace, data); - } - triggerAfterEach(toLocation, from, failure); - return failure; - }); - } - /** - * Helper to reject and skip all navigation guards if a new navigation happened - * @param to - * @param from - */ - function checkCanceledNavigationAndReject(to, from) { - const error = checkCanceledNavigation(to, from); - return error ? Promise.reject(error) : Promise.resolve(); - } - // TODO: refactor the whole before guards by internally using router.beforeEach - function navigate(to, from) { - let guards; - const [leavingRecords, updatingRecords, enteringRecords] = extractChangingRecords(to, from); - // all components here have been resolved once because we are leaving - guards = extractComponentsGuards(leavingRecords.reverse(), 'beforeRouteLeave', to, from); - // leavingRecords is already reversed - for (const record of leavingRecords) { - record.leaveGuards.forEach(guard => { - guards.push(guardToPromiseFn(guard, to, from)); - }); - } - const canceledNavigationCheck = checkCanceledNavigationAndReject.bind(null, to, from); - guards.push(canceledNavigationCheck); - // run the queue of per route beforeRouteLeave guards - return (runGuardQueue(guards) - .then(() => { - // check global guards beforeEach - guards = []; - for (const guard of beforeGuards.list()) { - guards.push(guardToPromiseFn(guard, to, from)); - } - guards.push(canceledNavigationCheck); - return runGuardQueue(guards); - }) - .then(() => { - // check in components beforeRouteUpdate - guards = extractComponentsGuards(updatingRecords, 'beforeRouteUpdate', to, from); - for (const record of updatingRecords) { - record.updateGuards.forEach(guard => { - guards.push(guardToPromiseFn(guard, to, from)); - }); - } - guards.push(canceledNavigationCheck); - // run the queue of per route beforeEnter guards - return runGuardQueue(guards); - }) - .then(() => { - // check the route beforeEnter - guards = []; - for (const record of to.matched) { - // do not trigger beforeEnter on reused views - if (record.beforeEnter && !from.matched.includes(record)) { - if (isArray(record.beforeEnter)) { - for (const beforeEnter of record.beforeEnter) - guards.push(guardToPromiseFn(beforeEnter, to, from)); - } - else { - guards.push(guardToPromiseFn(record.beforeEnter, to, from)); - } - } - } - guards.push(canceledNavigationCheck); - // run the queue of per route beforeEnter guards - return runGuardQueue(guards); - }) - .then(() => { - // NOTE: at this point to.matched is normalized and does not contain any () => Promise - // clear existing enterCallbacks, these are added by extractComponentsGuards - to.matched.forEach(record => (record.enterCallbacks = {})); - // check in-component beforeRouteEnter - guards = extractComponentsGuards(enteringRecords, 'beforeRouteEnter', to, from); - guards.push(canceledNavigationCheck); - // run the queue of per route beforeEnter guards - return runGuardQueue(guards); - }) - .then(() => { - // check global guards 
beforeResolve - guards = []; - for (const guard of beforeResolveGuards.list()) { - guards.push(guardToPromiseFn(guard, to, from)); - } - guards.push(canceledNavigationCheck); - return runGuardQueue(guards); - }) - // catch any navigation canceled - .catch(err => isNavigationFailure(err, 8 /* ErrorTypes.NAVIGATION_CANCELLED */) - ? err - : Promise.reject(err))); - } - function triggerAfterEach(to, from, failure) { - // navigation is confirmed, call afterGuards - // TODO: wrap with error handlers - for (const guard of afterGuards.list()) - guard(to, from, failure); - } - /** - * - Cleans up any navigation guards - * - Changes the url if necessary - * - Calls the scrollBehavior - */ - function finalizeNavigation(toLocation, from, isPush, replace, data) { - // a more recent navigation took place - const error = checkCanceledNavigation(toLocation, from); - if (error) - return error; - // only consider as push if it's not the first navigation - const isFirstNavigation = from === START_LOCATION_NORMALIZED; - const state = !isBrowser ? {} : history.state; - // change URL only if the user did a push/replace and if it's not the initial navigation because - // it's just reflecting the url - if (isPush) { - // on the initial navigation, we want to reuse the scroll position from - // history state if it exists - if (replace || isFirstNavigation) - routerHistory.replace(toLocation.fullPath, assign({ - scroll: isFirstNavigation && state && state.scroll, - }, data)); - else - routerHistory.push(toLocation.fullPath, data); - } - // accept current navigation - currentRoute.value = toLocation; - handleScroll(toLocation, from, isPush, isFirstNavigation); - markAsReady(); - } - let removeHistoryListener; - // attach listener to history to trigger navigations - function setupListeners() { - // avoid setting up listeners twice due to an invalid first navigation - if (removeHistoryListener) - return; - removeHistoryListener = routerHistory.listen((to, _from, info) => { - if (!router.listening) - return; - // cannot be a redirect route because it was in history - const toLocation = resolve(to); - // due to dynamic routing, and to hash history with manual navigation - // (manually changing the url or calling history.hash = '#/somewhere'), - // there could be a redirect record in history - const shouldRedirect = handleRedirectRecord(toLocation); - if (shouldRedirect) { - pushWithRedirect(assign(shouldRedirect, { replace: true }), toLocation).catch(noop); - return; - } - pendingLocation = toLocation; - const from = currentRoute.value; - // TODO: should be moved to web history? - if (isBrowser) { - saveScrollPosition(getScrollKey(from.fullPath, info.delta), computeScrollPosition()); - } - navigate(toLocation, from) - .catch((error) => { - if (isNavigationFailure(error, 4 /* ErrorTypes.NAVIGATION_ABORTED */ | 8 /* ErrorTypes.NAVIGATION_CANCELLED */)) { - return error; - } - if (isNavigationFailure(error, 2 /* ErrorTypes.NAVIGATION_GUARD_REDIRECT */)) { - // Here we could call if (info.delta) routerHistory.go(-info.delta, - // false) but this is bug prone as we have no way to wait the - // navigation to be finished before calling pushWithRedirect. Using - // a setTimeout of 16ms seems to work but there is no guarantee for - // it to work on every browser. So instead we do not restore the - // history entry and trigger a new navigation as requested by the - // navigation guard. 
- // the error is already handled by router.push we just want to avoid - // logging the error - pushWithRedirect(error.to, toLocation - // avoid an uncaught rejection, let push call triggerError - ) - .then(failure => { - // manual change in hash history #916 ending up in the URL not - // changing, but it was changed by the manual url change, so we - // need to manually change it ourselves - if (isNavigationFailure(failure, 4 /* ErrorTypes.NAVIGATION_ABORTED */ | - 16 /* ErrorTypes.NAVIGATION_DUPLICATED */) && - !info.delta && - info.type === NavigationType.pop) { - routerHistory.go(-1, false); - } - }) - .catch(noop); - // avoid the then branch - return Promise.reject(); - } - // do not restore history on unknown direction - if (info.delta) { - routerHistory.go(-info.delta, false); - } - // unrecognized error, transfer to the global handler - return triggerError(error, toLocation, from); - }) - .then((failure) => { - failure = - failure || - finalizeNavigation( - // after navigation, all matched components are resolved - toLocation, from, false); - // revert the navigation - if (failure) { - if (info.delta && - // a new navigation has been triggered, so we do not want to revert, that will change the current history - // entry while a different route is displayed - !isNavigationFailure(failure, 8 /* ErrorTypes.NAVIGATION_CANCELLED */)) { - routerHistory.go(-info.delta, false); - } - else if (info.type === NavigationType.pop && - isNavigationFailure(failure, 4 /* ErrorTypes.NAVIGATION_ABORTED */ | 16 /* ErrorTypes.NAVIGATION_DUPLICATED */)) { - // manual change in hash history #916 - // it's like a push but lacks the information of the direction - routerHistory.go(-1, false); - } - } - triggerAfterEach(toLocation, from, failure); - }) - .catch(noop); - }); - } - // Initialization and Errors - let readyHandlers = useCallbacks(); - let errorHandlers = useCallbacks(); - let ready; - /** - * Trigger errorHandlers added via onError and throws the error as well - * - * @param error - error to throw - * @param to - location we were navigating to when the error happened - * @param from - location we were navigating from when the error happened - * @returns the error as a rejected promise - */ - function triggerError(error, to, from) { - markAsReady(error); - const list = errorHandlers.list(); - if (list.length) { - list.forEach(handler => handler(error, to, from)); - } - else { - if ((process.env.NODE_ENV !== 'production')) { - warn('uncaught error during route navigation:'); - } - console.error(error); - } - return Promise.reject(error); - } - function isReady() { - if (ready && currentRoute.value !== START_LOCATION_NORMALIZED) - return Promise.resolve(); - return new Promise((resolve, reject) => { - readyHandlers.add([resolve, reject]); - }); - } - function markAsReady(err) { - if (!ready) { - // still not ready if an error happened - ready = !err; - setupListeners(); - readyHandlers - .list() - .forEach(([resolve, reject]) => (err ? 
reject(err) : resolve())); - readyHandlers.reset(); - } - return err; - } - // Scroll behavior - function handleScroll(to, from, isPush, isFirstNavigation) { - const { scrollBehavior } = options; - if (!isBrowser || !scrollBehavior) - return Promise.resolve(); - const scrollPosition = (!isPush && getSavedScrollPosition(getScrollKey(to.fullPath, 0))) || - ((isFirstNavigation || !isPush) && - history.state && - history.state.scroll) || - null; - return nextTick() - .then(() => scrollBehavior(to, from, scrollPosition)) - .then(position => position && scrollToPosition(position)) - .catch(err => triggerError(err, to, from)); - } - const go = (delta) => routerHistory.go(delta); - let started; - const installedApps = new Set(); - const router = { - currentRoute, - listening: true, - addRoute, - removeRoute, - hasRoute, - getRoutes, - resolve, - options, - push, - replace, - go, - back: () => go(-1), - forward: () => go(1), - beforeEach: beforeGuards.add, - beforeResolve: beforeResolveGuards.add, - afterEach: afterGuards.add, - onError: errorHandlers.add, - isReady, - install(app) { - const router = this; - app.component('RouterLink', RouterLink); - app.component('RouterView', RouterView); - app.config.globalProperties.$router = router; - Object.defineProperty(app.config.globalProperties, '$route', { - enumerable: true, - get: () => unref(currentRoute), - }); - // this initial navigation is only necessary on client, on server it doesn't - // make sense because it will create an extra unnecessary navigation and could - // lead to problems - if (isBrowser && - // used for the initial navigation client side to avoid pushing - // multiple times when the router is used in multiple apps - !started && - currentRoute.value === START_LOCATION_NORMALIZED) { - // see above - started = true; - push(routerHistory.location).catch(err => { - if ((process.env.NODE_ENV !== 'production')) - warn('Unexpected error when starting the router:', err); - }); - } - const reactiveRoute = {}; - for (const key in START_LOCATION_NORMALIZED) { - // @ts-expect-error: the key matches - reactiveRoute[key] = computed(() => currentRoute.value[key]); - } - app.provide(routerKey, router); - app.provide(routeLocationKey, reactive(reactiveRoute)); - app.provide(routerViewLocationKey, currentRoute); - const unmountApp = app.unmount; - installedApps.add(app); - app.unmount = function () { - installedApps.delete(app); - // the router is not attached to an app anymore - if (installedApps.size < 1) { - // invalidate the current navigation - pendingLocation = START_LOCATION_NORMALIZED; - removeHistoryListener && removeHistoryListener(); - removeHistoryListener = null; - currentRoute.value = START_LOCATION_NORMALIZED; - started = false; - ready = false; - } - unmountApp(); - }; - // TODO: this probably needs to be updated so it can be used by vue-termui - if (((process.env.NODE_ENV !== 'production') || __VUE_PROD_DEVTOOLS__) && isBrowser) { - addDevtools(app, router, matcher); - } - }, - }; - return router; -} -function runGuardQueue(guards) { - return guards.reduce((promise, guard) => promise.then(() => guard()), Promise.resolve()); -} -function extractChangingRecords(to, from) { - const leavingRecords = []; - const updatingRecords = []; - const enteringRecords = []; - const len = Math.max(from.matched.length, to.matched.length); - for (let i = 0; i < len; i++) { - const recordFrom = from.matched[i]; - if (recordFrom) { - if (to.matched.find(record => isSameRouteRecord(record, recordFrom))) - updatingRecords.push(recordFrom); - else - 
leavingRecords.push(recordFrom); - } - const recordTo = to.matched[i]; - if (recordTo) { - // the type doesn't matter because we are comparing per reference - if (!from.matched.find(record => isSameRouteRecord(record, recordTo))) { - enteringRecords.push(recordTo); - } - } - } - return [leavingRecords, updatingRecords, enteringRecords]; -} - -/** - * Returns the router instance. Equivalent to using `$router` inside - * templates. - */ -function useRouter() { - return inject(routerKey); -} -/** - * Returns the current route location. Equivalent to using `$route` inside - * templates. - */ -function useRoute() { - return inject(routeLocationKey); -} - -export { NavigationFailureType, RouterLink, RouterView, START_LOCATION_NORMALIZED as START_LOCATION, createMemoryHistory, createRouter, createRouterMatcher, createWebHashHistory, createWebHistory, isNavigationFailure, loadRouteLocation, matchedRouteKey, onBeforeRouteLeave, onBeforeRouteUpdate, parseQuery, routeLocationKey, routerKey, routerViewLocationKey, stringifyQuery, useLink, useRoute, useRouter, viewDepthKey }; diff --git a/backend/middlewares/event/test-vue/tsconfig.json b/backend/middlewares/event/test-vue/tsconfig.json deleted file mode 100644 index cb2043bce..000000000 --- a/backend/middlewares/event/test-vue/tsconfig.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "extends": "@vue/tsconfig/tsconfig.web.json", - "include": ["env.d.ts", "src/**/*", "src/**/*.vue"], - "compilerOptions": { - "baseUrl": ".", - "paths": { - "@/*": ["./src/*"] - } - }, - - "references": [ - { - "path": "./tsconfig.node.json" - } - ] -} diff --git a/backend/middlewares/event/test-vue/tsconfig.node.json b/backend/middlewares/event/test-vue/tsconfig.node.json deleted file mode 100644 index 424084aa5..000000000 --- a/backend/middlewares/event/test-vue/tsconfig.node.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "@vue/tsconfig/tsconfig.node.json", - "include": ["vite.config.*", "vitest.config.*", "cypress.config.*", "playwright.config.*"], - "compilerOptions": { - "composite": true, - "types": ["node"] - } -} diff --git a/backend/middlewares/event/test-vue/vite.config.ts b/backend/middlewares/event/test-vue/vite.config.ts deleted file mode 100644 index 99135b242..000000000 --- a/backend/middlewares/event/test-vue/vite.config.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { fileURLToPath, URL } from 'node:url' - -import { defineConfig } from 'vite' -import vue from '@vitejs/plugin-vue' -import wasm from "vite-plugin-wasm" -import topLevelAwait from "vite-plugin-top-level-await" - -export default defineConfig({ - plugins: [wasm(), topLevelAwait(), vue()], - resolve: { - alias: { - '@': fileURLToPath(new URL('./src', import.meta.url)) - } - } -}) diff --git a/backend/middlewares/event/tests/config/conf-default.toml b/backend/middlewares/event/tests/config/conf-default.toml index 2ff8ae45a..be33c2b91 100644 --- a/backend/middlewares/event/tests/config/conf-default.toml +++ b/backend/middlewares/event/tests/config/conf-default.toml @@ -1,7 +1,9 @@ [cs] [csm.event] -event_url = "ws://127.0.0.1:8080/event/" +enable = true +cluster = "singleton" +durable = true [fw.web_server] port = 8080 @@ -13,3 +15,7 @@ doc_urls = [["test env", "http://127.0.0.1:8080/"]] [fw.cluster] watch_kind = "cache" cache_check_interval_sec = 10 + +[fw.log] +level = "debug" +directives = ["openraft=off"] \ No newline at end of file diff --git a/backend/middlewares/event/tests/test_event.rs b/backend/middlewares/event/tests/test_event.rs index a48ccdcce..5a8587bf7 100644 --- 
a/backend/middlewares/event/tests/test_event.rs +++ b/backend/middlewares/event/tests/test_event.rs @@ -1,53 +1,104 @@ use std::env; use std::time::Duration; +use asteroid_mq::prelude::{Interest, Subject, TopicCode}; +use asteroid_mq_sdk::model::EdgeMessage; +use asteroid_mq_sdk::ClientNode; use bios_basic::rbum::rbum_config::RbumConfig; use bios_basic::test::init_test_container; use bios_basic::test::test_http_client::TestHttpClient; use bios_mw_event::event_constants::DOMAIN_CODE; use bios_mw_event::event_initializer; use tardis::basic::dto::TardisContext; -use tardis::basic::result::TardisResult; -use tardis::tokio::time::sleep; -use tardis::{testcontainers, tokio, TardisFuns}; - +use tardis::log as tracing; +use tardis::serde_json::{json, Value}; +use tardis::web::web_resp::Void; +use tardis::{tardis_static, testcontainers, tokio, TardisFuns}; #[tokio::test(flavor = "multi_thread")] -async fn test_event() -> TardisResult<()> { +async fn test_event() -> Result<(), Box> { env::set_var("RUST_LOG", "debug,tardis=trace,bios_mw_event=trace,test_event=trace,sqlx::query=off"); let docker = testcontainers::clients::Cli::default(); let _x = init_test_container::init(&docker, None).await?; init_data().await?; + test_event_topic_api().await?; Ok(()) } -async fn init_data() -> TardisResult<()> { +async fn init_data() -> Result<(), Box> { // Initialize RBUM bios_basic::rbum::rbum_initializer::init(DOMAIN_CODE, RbumConfig::default()).await?; let web_server = TardisFuns::web_server(); // Initialize Event - event_initializer::init(web_server.as_ref()).await.unwrap(); - tokio::spawn(async move { - web_server.start().await.unwrap(); - }); - - sleep(Duration::from_millis(500)).await; + event_initializer::init(web_server.as_ref()).await?; + web_server.start().await?; let ctx = TardisContext { - own_paths: "".to_string(), - ak: "".to_string(), + own_paths: "test".to_string(), + ak: "test".to_string(), roles: vec![], groups: vec![], - owner: "".to_string(), + owner: "test-owner".to_string(), ..Default::default() }; let mut client = TestHttpClient::new(format!("http://127.0.0.1:8080/{}", DOMAIN_CODE)); client.set_auth(&ctx)?; + tokio::time::sleep(Duration::from_secs(5)).await; + Ok(()) +} + +tardis_static! { + pub test_tardis_context: TardisContext = TardisContext { + own_paths: "test".to_string(), + ak: "test".to_string(), + roles: vec![], + groups: vec![], + owner: "test-owner".to_string(), + ..Default::default() + }; +} + +pub async fn test_event_topic_api() -> Result<(), Box> { + const TEST_TOPIC_NAME: &str = "test-topic"; + let mut client = TestHttpClient::new(format!("http://127.0.0.1:8080/{}", DOMAIN_CODE)); + let ctx = test_tardis_context(); + client.set_auth(ctx).unwrap(); + let id = client + .post::<_, String>( + "/ci/topic", + &json! 
{{ + "code": TEST_TOPIC_NAME, + "name": "topic/hello", + "topic_code": TEST_TOPIC_NAME, + "overflow_policy": "RejectNew", + "overflow_size": 500, + }}, + ) + .await; + tracing::info!(?id, "event registered"); + let topics = client.get::("/ci/topic?page_number=1&page_size=10").await; + tracing::info!(?topics, "event paged list"); + let bind_result = client.put::("/ca/register", &Void).await; + let node_id = bind_result["node_id"].as_str().expect("node_id is settled"); + tracing::info!(?node_id, "bind context result"); + let client_node = ClientNode::connect(format!("ws://127.0.0.1:8080/{DOMAIN_CODE}/ca/connect?node_id={}", node_id)).await?; + const TOPIC_CODE: TopicCode = TopicCode::const_new(TEST_TOPIC_NAME); + let mut ep = client_node.create_endpoint(TopicCode::const_new(TEST_TOPIC_NAME), [Interest::new("test_node")]).await?; + tokio::spawn(async move { + while let Some(message) = ep.next_message().await { + tracing::info!(payload = ?message.text().unwrap(), "received message"); + let _ = message.ack_processed().await; + } + }); + let message = EdgeMessage::builder(TOPIC_CODE, [Subject::const_new("test_node")], "test message").build(); + let _ack = client_node.send_message(message).await?; + + tokio::time::sleep(Duration::from_secs(5)).await; Ok(()) } diff --git a/backend/middlewares/flow/src/api/cc/flow_cc_inst_api.rs b/backend/middlewares/flow/src/api/cc/flow_cc_inst_api.rs index b0db47cc2..9f3b0aa65 100644 --- a/backend/middlewares/flow/src/api/cc/flow_cc_inst_api.rs +++ b/backend/middlewares/flow/src/api/cc/flow_cc_inst_api.rs @@ -128,7 +128,15 @@ impl FlowCcInstApi { let funs = flow_constants::get_tardis_inst(); let mut transfer = transfer_req.0; FlowInstServ::check_transfer_vars(&flow_inst_id.0, &mut transfer, &funs, &ctx.0).await?; - let result = FlowInstServ::transfer(&flow_inst_id.0, &transfer, false, FlowExternalCallbackOp::Default, loop_check_helper::InstancesTransition::default(), &ctx.0).await?; + let result = FlowInstServ::transfer( + &flow_inst_id.0, + &transfer, + false, + FlowExternalCallbackOp::Default, + loop_check_helper::InstancesTransition::default(), + &ctx.0, + ) + .await?; ctx.0.execute_task().await?; TardisResp::ok(result) } @@ -155,7 +163,17 @@ impl FlowCcInstApi { flow_inst_id_transfer_map.insert(flow_inst_id, transfer_req); } for (flow_inst_id, transfer_req) in flow_inst_id_transfer_map { - result.push(FlowInstServ::transfer(flow_inst_id, &transfer_req, false, FlowExternalCallbackOp::Default, loop_check_helper::InstancesTransition::default(), &ctx.0).await?); + result.push( + FlowInstServ::transfer( + flow_inst_id, + &transfer_req, + false, + FlowExternalCallbackOp::Default, + loop_check_helper::InstancesTransition::default(), + &ctx.0, + ) + .await?, + ); } ctx.0.execute_task().await?; TardisResp::ok(result) diff --git a/backend/middlewares/flow/src/api/cc/flow_cc_model_api.rs b/backend/middlewares/flow/src/api/cc/flow_cc_model_api.rs index f09f03a65..f2fa10087 100644 --- a/backend/middlewares/flow/src/api/cc/flow_cc_model_api.rs +++ b/backend/middlewares/flow/src/api/cc/flow_cc_model_api.rs @@ -13,7 +13,8 @@ use tardis::web::poem_openapi::payload::Json; use tardis::web::web_resp::{TardisApiResult, TardisPage, TardisResp, Void}; use crate::dto::flow_model_dto::{ - FlowModelAddCustomModelReq, FlowModelAddCustomModelResp, FlowModelAddReq, FlowModelAggResp, FlowModelBindStateReq, FlowModelFilterReq, FlowModelFindRelStateResp, FlowModelModifyReq, FlowModelSortStatesReq, FlowModelSummaryResp, FlowModelUnbindStateReq + FlowModelAddCustomModelReq, 
FlowModelAddCustomModelResp, FlowModelAddReq, FlowModelAggResp, FlowModelBindStateReq, FlowModelFilterReq, FlowModelFindRelStateResp, + FlowModelModifyReq, FlowModelSortStatesReq, FlowModelSummaryResp, FlowModelUnbindStateReq, }; use crate::dto::flow_state_dto::FlowStateRelModelModifyReq; use crate::dto::flow_transition_dto::{FlowTransitionModifyReq, FlowTransitionSortStatesReq}; diff --git a/backend/middlewares/flow/src/api/ci/flow_ci_inst_api.rs b/backend/middlewares/flow/src/api/ci/flow_ci_inst_api.rs index 95ed65110..ae51cf918 100644 --- a/backend/middlewares/flow/src/api/ci/flow_ci_inst_api.rs +++ b/backend/middlewares/flow/src/api/ci/flow_ci_inst_api.rs @@ -15,7 +15,9 @@ use tardis::{log, tokio}; use crate::dto::flow_external_dto::FlowExternalCallbackOp; use crate::dto::flow_inst_dto::{ - FlowInstAbortReq, FlowInstBatchBindReq, FlowInstBatchBindResp, FlowInstBindReq, FlowInstDetailResp, FlowInstFindNextTransitionsReq, FlowInstFindStateAndTransitionsReq, FlowInstFindStateAndTransitionsResp, FlowInstModifyAssignedReq, FlowInstModifyCurrentVarsReq, FlowInstStartReq, FlowInstTransferReq, FlowInstTransferResp, FlowInstTransitionInfo, FlowOperationContext + FlowInstAbortReq, FlowInstBatchBindReq, FlowInstBatchBindResp, FlowInstBindReq, FlowInstDetailResp, FlowInstFindNextTransitionsReq, FlowInstFindStateAndTransitionsReq, + FlowInstFindStateAndTransitionsResp, FlowInstModifyAssignedReq, FlowInstModifyCurrentVarsReq, FlowInstStartReq, FlowInstTransferReq, FlowInstTransferResp, + FlowInstTransitionInfo, FlowOperationContext, }; use crate::flow_constants; use crate::helper::loop_check_helper; @@ -50,14 +52,16 @@ impl FlowCiInstApi { let mut result = FlowInstServ::get(&flow_inst_id.0, &funs, &ctx.0).await?; // @TODO 临时处理方式,后续需增加接口 result.transitions = Some( - FlowInstServ::find_next_transitions(&flow_inst_id.0, &FlowInstFindNextTransitionsReq { - vars: None, - }, &funs, &ctx.0).await?.into_iter().map(|tran| FlowInstTransitionInfo { - id: tran.next_flow_transition_id, - start_time: Utc::now(), - op_ctx: FlowOperationContext::default(), - output_message: Some(tran.next_flow_transition_name), - }).collect_vec() + FlowInstServ::find_next_transitions(&flow_inst_id.0, &FlowInstFindNextTransitionsReq { vars: None }, &funs, &ctx.0) + .await? 
+ .into_iter() + .map(|tran| FlowInstTransitionInfo { + id: tran.next_flow_transition_id, + start_time: Utc::now(), + op_ctx: FlowOperationContext::default(), + output_message: Some(tran.next_flow_transition_name), + }) + .collect_vec(), ); ctx.0.execute_task().await?; TardisResp::ok(result) @@ -109,7 +113,15 @@ impl FlowCiInstApi { check_without_owner_and_unsafe_fill_ctx(request, &funs, &mut ctx.0)?; let mut transfer = transfer_req.0; FlowInstServ::check_transfer_vars(&flow_inst_id.0, &mut transfer, &funs, &ctx.0).await?; - let result = FlowInstServ::transfer(&flow_inst_id.0, &transfer, false, FlowExternalCallbackOp::Default, loop_check_helper::InstancesTransition::default(), &ctx.0).await?; + let result = FlowInstServ::transfer( + &flow_inst_id.0, + &transfer, + false, + FlowExternalCallbackOp::Default, + loop_check_helper::InstancesTransition::default(), + &ctx.0, + ) + .await?; ctx.0.execute_task().await?; TardisResp::ok(result) } @@ -138,7 +150,17 @@ impl FlowCiInstApi { flow_inst_id_transfer_map.insert(flow_inst_id, transfer_req); } for (flow_inst_id, transfer_req) in flow_inst_id_transfer_map { - result.push(FlowInstServ::transfer(flow_inst_id, &transfer_req, false, FlowExternalCallbackOp::Default, loop_check_helper::InstancesTransition::default(), &ctx.0).await?); + result.push( + FlowInstServ::transfer( + flow_inst_id, + &transfer_req, + false, + FlowExternalCallbackOp::Default, + loop_check_helper::InstancesTransition::default(), + &ctx.0, + ) + .await?, + ); } ctx.0.execute_task().await?; TardisResp::ok(result) diff --git a/backend/middlewares/flow/src/api/cs/flow_cs_config_api.rs b/backend/middlewares/flow/src/api/cs/flow_cs_config_api.rs index 2f90d44f8..3fea629a4 100644 --- a/backend/middlewares/flow/src/api/cs/flow_cs_config_api.rs +++ b/backend/middlewares/flow/src/api/cs/flow_cs_config_api.rs @@ -66,14 +66,22 @@ impl FlowCsConfigApi { ("TP", "idp_test"), ("TS", "idp_test"), ]); - let states = FlowStateServ::find_id_name_items(&FlowStateFilterReq { - basic: RbumBasicFilterReq { - ignore_scope: true, - with_sub_own_paths: true, + let states = FlowStateServ::find_id_name_items( + &FlowStateFilterReq { + basic: RbumBasicFilterReq { + ignore_scope: true, + with_sub_own_paths: true, + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, None, None, &funs, &global_ctx).await.unwrap(); + None, + None, + &funs, + &global_ctx, + ) + .await + .unwrap(); let mut page = 1; loop { let insts = FlowInstServ::paginate(None, None, None, None, Some(true), page, 200, &funs, &global_ctx).await.unwrap().records; @@ -83,28 +91,36 @@ impl FlowCsConfigApi { for inst in insts { let state_name = states.get(&inst.current_state_id).cloned().unwrap_or_default(); if let Some(table) = tag_search_map.get(&inst.tag.as_str()) { - SpiSearchClient::modify_item_and_name(table, &inst.rel_business_obj_id, &SearchItemModifyReq { - kind: None, - title: None, - name: None, - content: None, - owner: None, - own_paths: None, - create_time: None, - update_time: None, - ext: Some(json!({ - "status": state_name, - })), - ext_override: None, - visit_keys: None, - kv_disable: None, - }, &funs, &global_ctx).await.unwrap_or_default(); + SpiSearchClient::modify_item_and_name( + table, + &inst.rel_business_obj_id, + &SearchItemModifyReq { + kind: None, + title: None, + name: None, + content: None, + owner: None, + own_paths: None, + create_time: None, + update_time: None, + ext: Some(json!({ + "status": state_name, + })), + ext_override: None, + visit_keys: None, + kv_disable: None, + }, + &funs, + 
&global_ctx, + ) + .await + .unwrap_or_default(); } } page += 1; } }); ctx.0.execute_task().await?; - TardisResp::ok(Void{}) + TardisResp::ok(Void {}) } } diff --git a/backend/middlewares/flow/src/serv/clients.rs b/backend/middlewares/flow/src/serv/clients.rs index de8817060..7d5686866 100644 --- a/backend/middlewares/flow/src/serv/clients.rs +++ b/backend/middlewares/flow/src/serv/clients.rs @@ -1,2 +1,2 @@ -pub mod log_client; +pub mod flow_log_client; pub mod search_client; diff --git a/backend/middlewares/flow/src/serv/clients/log_client.rs b/backend/middlewares/flow/src/serv/clients/flow_log_client.rs similarity index 69% rename from backend/middlewares/flow/src/serv/clients/log_client.rs rename to backend/middlewares/flow/src/serv/clients/flow_log_client.rs index b470eb40b..89cec43a0 100644 --- a/backend/middlewares/flow/src/serv/clients/log_client.rs +++ b/backend/middlewares/flow/src/serv/clients/flow_log_client.rs @@ -1,13 +1,17 @@ -use bios_sdk_invoke::clients::spi_log_client::SpiLogClient; +use bios_sdk_invoke::clients::{ + iam_client::IamClient, + spi_log_client::{LogItemAddReq, SpiLogClient}, +}; use serde::Serialize; use serde_json::Value; use tardis::{ basic::{dto::TardisContext, result::TardisResult}, + chrono::{DateTime, Utc}, tokio, TardisFuns, TardisFunsInst, }; -use crate::flow_constants; +use crate::{flow_config::FlowConfig, flow_constants}; pub struct FlowLogClient; #[derive(Serialize, Default, Debug, Clone)] @@ -39,9 +43,11 @@ impl FlowLogClient { op_kind: Option, rel_key: Option, ctx: &TardisContext, + push: bool, ) -> TardisResult<()> { let ctx_clone = ctx.clone(); - ctx.add_async_task(Box::new(|| { + let push_clone = push; // 克隆 push 变量 + ctx.add_async_task(Box::new(move || { Box::pin(async move { let task_handle = tokio::spawn(async move { let funs = flow_constants::get_tardis_inst(); @@ -56,6 +62,7 @@ impl FlowLogClient { Some(tardis::chrono::Utc::now().to_rfc3339()), &funs, &ctx_clone, + push_clone, // 使用克隆的 push 变量 ) .await .unwrap(); @@ -78,26 +85,34 @@ impl FlowLogClient { ts: Option, funs: &TardisFunsInst, ctx: &TardisContext, + push: bool, ) -> TardisResult<()> { // generate log item let tag: String = tag.into(); let own_paths = if ctx.own_paths.len() < 2 { None } else { Some(ctx.own_paths.clone()) }; let owner = if ctx.owner.len() < 2 { None } else { Some(ctx.owner.clone()) }; - SpiLogClient::add_with_many_params( - &tag, - &TardisFuns::json.obj_to_string(&content).expect("req_msg not a valid json value"), - ext, + let owner_name = IamClient::new("", funs, &ctx, funs.conf::().invoke.module_urls.get("iam").expect("missing iam base url")) + .get_account(&ctx.owner, &ctx.own_paths) + .await? 
+ .owner_name; + + let req = LogItemAddReq { + tag: tag.to_string(), + content: TardisFuns::json.obj_to_json(&content).expect("req_msg not a valid json value"), kind, + ext, key, op, rel_key, - ts, + idempotent_id: None, + ts: ts.map(|ts| DateTime::parse_from_rfc3339(&ts).unwrap_or_default().with_timezone(&Utc)), owner, own_paths, - funs, - ctx, - ) - .await?; + msg: None, + owner_name, + push: push, + }; + SpiLogClient::add(req, funs, ctx).await?; Ok(()) } } diff --git a/backend/middlewares/flow/src/serv/flow_event_serv.rs b/backend/middlewares/flow/src/serv/flow_event_serv.rs index bb54d621d..ee1716519 100644 --- a/backend/middlewares/flow/src/serv/flow_event_serv.rs +++ b/backend/middlewares/flow/src/serv/flow_event_serv.rs @@ -25,7 +25,8 @@ use crate::{ FlowTransitionActionByStateChangeInfo, FlowTransitionActionByVarChangeInfoChangedKind, FlowTransitionActionChangeAgg, FlowTransitionActionChangeKind, FlowTransitionFrontActionInfo, FlowTransitionFrontActionRightValue, StateChangeConditionOp, TagRelKind, }, - }, helper::loop_check_helper, + }, + helper::loop_check_helper, }; use super::{flow_external_serv::FlowExternalServ, flow_inst_serv::FlowInstServ, flow_model_serv::FlowModelServ, flow_state_serv::FlowStateServ}; @@ -37,7 +38,12 @@ pub struct FlowEventServ; impl FlowEventServ { #[async_recursion] - pub async fn do_front_change(flow_inst_id: &str, modified_instance_transations: loop_check_helper::InstancesTransition, ctx: &TardisContext, funs: &TardisFunsInst) -> TardisResult<()> { + pub async fn do_front_change( + flow_inst_id: &str, + modified_instance_transations: loop_check_helper::InstancesTransition, + ctx: &TardisContext, + funs: &TardisFunsInst, + ) -> TardisResult<()> { let flow_inst_detail = FlowInstServ::get(flow_inst_id, funs, ctx).await?; let flow_model = FlowModelServ::get_item( &flow_inst_detail.rel_flow_model_id, @@ -147,7 +153,13 @@ impl FlowEventServ { } } - pub async fn do_post_change(flow_inst_id: &str, flow_transition_id: &str, modified_instance_transations: loop_check_helper::InstancesTransition, ctx: &TardisContext, funs: &TardisFunsInst) -> TardisResult<()> { + pub async fn do_post_change( + flow_inst_id: &str, + flow_transition_id: &str, + modified_instance_transations: loop_check_helper::InstancesTransition, + ctx: &TardisContext, + funs: &TardisFunsInst, + ) -> TardisResult<()> { let flow_inst_detail = FlowInstServ::get(flow_inst_id, funs, ctx).await?; let global_ctx = TardisContext { own_paths: "".to_string(), @@ -311,7 +323,7 @@ impl FlowEventServ { funs, ) .await?; - FlowEventServ::do_front_change(&inst_id, modified_instance_transations.clone(),ctx, funs).await?; + FlowEventServ::do_front_change(&inst_id, modified_instance_transations.clone(), ctx, funs).await?; } } } else { diff --git a/backend/middlewares/flow/src/serv/flow_inst_serv.rs b/backend/middlewares/flow/src/serv/flow_inst_serv.rs index 9f0f16564..ddce8a063 100644 --- a/backend/middlewares/flow/src/serv/flow_inst_serv.rs +++ b/backend/middlewares/flow/src/serv/flow_inst_serv.rs @@ -31,7 +31,8 @@ use tardis::{ }; use crate::{ - domain::{flow_inst, flow_model, flow_transition}, dto::{ + domain::{flow_inst, flow_model, flow_transition}, + dto::{ flow_external_dto::{FlowExternalCallbackOp, FlowExternalParams}, flow_inst_dto::{ FlowInstAbortReq, FlowInstBatchBindReq, FlowInstBatchBindResp, FlowInstDetailResp, FlowInstFilterReq, FlowInstFindNextTransitionResp, FlowInstFindNextTransitionsReq, @@ -42,7 +43,10 @@ use crate::{ flow_state_dto::{FlowStateAggResp, FlowStateFilterReq, FlowStateRelModelExt, 
FlowSysStateKind}, flow_transition_dto::{FlowTransitionDetailResp, FlowTransitionFrontActionInfo}, flow_var_dto::FillType, - }, flow_constants, helper::loop_check_helper, serv::{flow_model_serv::FlowModelServ, flow_state_serv::FlowStateServ} + }, + flow_constants, + helper::loop_check_helper, + serv::{flow_model_serv::FlowModelServ, flow_state_serv::FlowStateServ}, }; use super::{ @@ -547,13 +551,7 @@ impl FlowInstServ { let mut modified_instance_transations_cp = modified_instance_transations.clone(); if !modified_instance_transations_cp.check(flow_inst_id.to_string(), transfer_req.flow_transition_id.clone()) { let flow_inst_detail = Self::get(flow_inst_id, &funs, ctx).await?; - return Self::gen_transfer_resp( - flow_inst_id, - &flow_inst_detail.current_state_id, - ctx, - &funs, - ) - .await; + return Self::gen_transfer_resp(flow_inst_id, &flow_inst_detail.current_state_id, ctx, &funs).await; } funs.begin().await?; let result = Self::do_transfer(flow_inst_id, transfer_req, skip_filter, callback_kind, &funs, ctx).await; @@ -1036,7 +1034,12 @@ impl FlowInstServ { } } - pub async fn modify_current_vars(flow_inst_id: &str, current_vars: &HashMap, modified_instance_transations: loop_check_helper::InstancesTransition, ctx: &TardisContext) -> TardisResult<()> { + pub async fn modify_current_vars( + flow_inst_id: &str, + current_vars: &HashMap, + modified_instance_transations: loop_check_helper::InstancesTransition, + ctx: &TardisContext, + ) -> TardisResult<()> { let mut funs = flow_constants::get_tardis_inst(); funs.begin().await?; let flow_inst_detail = Self::get(flow_inst_id, &funs, ctx).await?; @@ -1138,7 +1141,13 @@ impl FlowInstServ { ..global_ctx.clone() }; let new_vars = Self::get_new_vars(&flow_inst.id, funs, &ctx).await?; - Self::modify_current_vars(&flow_inst.id, &TardisFuns::json.json_to_obj::>(new_vars).unwrap_or_default(), loop_check_helper::InstancesTransition::default(), &ctx).await?; + Self::modify_current_vars( + &flow_inst.id, + &TardisFuns::json.json_to_obj::>(new_vars).unwrap_or_default(), + loop_check_helper::InstancesTransition::default(), + &ctx, + ) + .await?; } } @@ -1149,7 +1158,13 @@ impl FlowInstServ { let mut current_vars = Self::get(inst_id, funs, ctx).await?.current_vars; if current_vars.is_none() || !current_vars.clone().unwrap_or_default().contains_key(key) { let new_vars = Self::get_new_vars(inst_id, funs, ctx).await?; - Self::modify_current_vars(inst_id, &TardisFuns::json.json_to_obj::>(new_vars).unwrap_or_default(), loop_check_helper::InstancesTransition::default(), ctx).await?; + Self::modify_current_vars( + inst_id, + &TardisFuns::json.json_to_obj::>(new_vars).unwrap_or_default(), + loop_check_helper::InstancesTransition::default(), + ctx, + ) + .await?; current_vars = Self::get(inst_id, funs, ctx).await?.current_vars; } @@ -1167,15 +1182,19 @@ impl FlowInstServ { ) -> TardisResult<()> { let mut own_paths_list = vec![]; if let Some(rel_template_id) = rel_template_id { - own_paths_list = FlowRelServ::find_to_simple_rels(&FlowRelKind::FlowAppTemplate, &rel_template_id, None, None, funs, ctx).await?.into_iter().map(|rel| format!("{}/{}", rel.rel_own_paths, rel.rel_id)).collect_vec(); + own_paths_list = FlowRelServ::find_to_simple_rels(&FlowRelKind::FlowAppTemplate, &rel_template_id, None, None, funs, ctx) + .await? 
+ .into_iter() + .map(|rel| format!("{}/{}", rel.rel_own_paths, rel.rel_id)) + .collect_vec(); + if own_paths_list.contains(&ctx.own_paths) { + own_paths_list = vec![ctx.own_paths.clone()]; + } } else { own_paths_list.push(ctx.own_paths.clone()); } for own_paths in own_paths_list { - let mock_ctx = TardisContext { - own_paths, - ..ctx.clone() - }; + let mock_ctx = TardisContext { own_paths, ..ctx.clone() }; Self::unsafe_modify_state(tag, modify_model_id, modify_model_states.clone(), state_id, funs, &mock_ctx).await?; Self::unsafe_modify_rel_model_id(tag, modify_model_id, funs, &mock_ctx).await?; } @@ -1183,12 +1202,7 @@ impl FlowInstServ { Ok(()) } - async fn unsafe_modify_rel_model_id( - tag: &str, - modify_model_id: &str, - funs: &TardisFunsInst, - ctx: &TardisContext, - ) -> TardisResult<()> { + async fn unsafe_modify_rel_model_id(tag: &str, modify_model_id: &str, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> { let mut update_statement = Query::update(); update_statement.table(flow_inst::Entity); update_statement.value(flow_inst::Column::RelFlowModelId, modify_model_id); @@ -1237,14 +1251,21 @@ impl FlowInstServ { ..Default::default() }; funs.db().update_one(flow_inst, &mock_ctx).await.unwrap(); - let model_tag = FlowModelServ::get_item(modify_model_id, &FlowModelFilterReq { - basic: RbumBasicFilterReq { - own_paths: Some("".to_string()), - with_sub_own_paths: true, + let model_tag = FlowModelServ::get_item( + modify_model_id, + &FlowModelFilterReq { + basic: RbumBasicFilterReq { + own_paths: Some("".to_string()), + with_sub_own_paths: true, + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, funs, ctx).await.map(|detail| detail.tag); + funs, + ctx, + ) + .await + .map(|detail| detail.tag); let next_flow_state = FlowStateServ::get_item( state_id, &FlowStateFilterReq { diff --git a/backend/middlewares/flow/src/serv/flow_model_serv.rs b/backend/middlewares/flow/src/serv/flow_model_serv.rs index 28657f96e..095596ee3 100644 --- a/backend/middlewares/flow/src/serv/flow_model_serv.rs +++ b/backend/middlewares/flow/src/serv/flow_model_serv.rs @@ -36,7 +36,9 @@ use crate::{ FlowModelModifyReq, FlowModelSummaryResp, }, flow_state_dto::{FlowStateAggResp, FlowStateDetailResp, FlowStateFilterReq, FlowStateRelModelExt, FlowStateRelModelModifyReq}, - flow_transition_dto::{FlowTransitionActionChangeKind, FlowTransitionAddReq, FlowTransitionDetailResp, FlowTransitionInitInfo, FlowTransitionModifyReq, FlowTransitionPostActionInfo}, + flow_transition_dto::{ + FlowTransitionActionChangeKind, FlowTransitionAddReq, FlowTransitionDetailResp, FlowTransitionInitInfo, FlowTransitionModifyReq, FlowTransitionPostActionInfo, + }, }, flow_config::FlowBasicInfoManager, flow_constants, @@ -46,7 +48,7 @@ use async_trait::async_trait; use super::{ clients::{ - log_client::{FlowLogClient, LogParamContent, LogParamTag}, + flow_log_client::{FlowLogClient, LogParamContent, LogParamTag}, search_client::FlowSearchClient, }, flow_inst_serv::FlowInstServ, @@ -130,7 +132,7 @@ impl RbumItemCrudOperation TardisResult<()> { let _: Void = client.put(&format!("/ci/manage/bs/{}/rel/u001", bs_id), &Void {}).await; Ok(()) -} \ No newline at end of file +} diff --git a/backend/middlewares/flow/tests/test_flow_scenes_fsm1.rs b/backend/middlewares/flow/tests/test_flow_scenes_fsm1.rs index d0760596d..12d54083c 100644 --- a/backend/middlewares/flow/tests/test_flow_scenes_fsm1.rs +++ b/backend/middlewares/flow/tests/test_flow_scenes_fsm1.rs @@ -14,16 +14,18 @@ use 
bios_mw_flow::dto::flow_state_dto::{FlowStateRelModelExt, FlowStateSummaryRe use bios_mw_flow::dto::flow_transition_dto::{FlowTransitionAddReq, FlowTransitionModifyReq}; use bios_sdk_invoke::clients::spi_kv_client::KvItemSummaryResp; -use bios_spi_search::dto::search_item_dto::{SearchItemQueryReq, SearchItemSearchCtxReq, SearchItemSearchPageReq, SearchItemSearchQScopeKind, SearchItemSearchReq, SearchItemSearchResp}; +use bios_spi_search::dto::search_item_dto::{ + SearchItemQueryReq, SearchItemSearchCtxReq, SearchItemSearchPageReq, SearchItemSearchQScopeKind, SearchItemSearchReq, SearchItemSearchResp, +}; use serde_json::json; use tardis::basic::dto::TardisContext; use std::time::Duration; use tardis::basic::result::TardisResult; use tardis::log::{debug, info}; +use tardis::tokio::time::sleep; use tardis::web::web_resp::{TardisPage, Void}; use tardis::TardisFuns; -use tardis::tokio::time::sleep; pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttpClient) -> TardisResult<()> { info!("【test_flow_scenes_fsm】"); @@ -190,12 +192,10 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp &format!("/cc/model/{}", req_default_model_template_id.clone()), &FlowModelModifyReq { init_state_id: Some(init_state_id.to_string()), - bind_states: Some(vec![ - FlowModelBindStateReq { - state_id: init_state_id.clone(), - ext: FlowStateRelModelExt { sort: 1, show_btns: None }, - }, - ]), + bind_states: Some(vec![FlowModelBindStateReq { + state_id: init_state_id.clone(), + ext: FlowStateRelModelExt { sort: 1, show_btns: None }, + }]), add_transitions: None, ..Default::default() }, @@ -222,23 +222,26 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp .await; let req_model_uninit_template_id = req_model_uninit_template_aggs.id.clone(); sleep(Duration::from_millis(500)).await; - let model_templates: TardisPage = search_client.put("/ci/item/search", &SearchItemSearchReq { - tag: "flow_model".to_string(), - ctx: SearchItemSearchCtxReq { - tenants: Some(vec![ctx.own_paths.clone()]), - ..Default::default() - }, - query: SearchItemQueryReq { - ..Default::default() - }, - adv_query: None, - sort: None, - page: SearchItemSearchPageReq { - number: 1, - size: 20, - fetch_total: true, - } - }).await; + let model_templates: TardisPage = search_client + .put( + "/ci/item/search", + &SearchItemSearchReq { + tag: "flow_model".to_string(), + ctx: SearchItemSearchCtxReq { + tenants: Some(vec![ctx.own_paths.clone()]), + ..Default::default() + }, + query: SearchItemQueryReq { ..Default::default() }, + adv_query: None, + sort: None, + page: SearchItemSearchPageReq { + number: 1, + size: 20, + fetch_total: true, + }, + }, + ) + .await; assert_eq!(model_templates.total_size, 3); assert!(model_templates.records.iter().any(|record| record.key == req_default_model_template_id)); assert!(model_templates.records.iter().any(|record| record.key == req_model_uninit_template_id)); @@ -255,24 +258,30 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp ) .await; sleep(Duration::from_millis(500)).await; - let model_templates: TardisPage = search_client.put("/ci/item/search", &SearchItemSearchReq { - tag: "flow_model".to_string(), - ctx: SearchItemSearchCtxReq { - tenants: Some(vec![ctx.own_paths.clone()]), - ..Default::default() - }, - query: SearchItemQueryReq { - ..Default::default() - }, - adv_query: None, - sort: None, - page: SearchItemSearchPageReq { - number: 1, - size: 20, - fetch_total: true, - } - }).await; - 
assert_eq!(model_templates.records.iter().find(|record| record.key == req_default_model_template_id).unwrap().title, "测试需求默认模板11".to_string()); + let model_templates: TardisPage = search_client + .put( + "/ci/item/search", + &SearchItemSearchReq { + tag: "flow_model".to_string(), + ctx: SearchItemSearchCtxReq { + tenants: Some(vec![ctx.own_paths.clone()]), + ..Default::default() + }, + query: SearchItemQueryReq { ..Default::default() }, + adv_query: None, + sort: None, + page: SearchItemSearchPageReq { + number: 1, + size: 20, + fetch_total: true, + }, + }, + ) + .await; + assert_eq!( + model_templates.records.iter().find(|record| record.key == req_default_model_template_id).unwrap().title, + "测试需求默认模板11".to_string() + ); // creat share model template let _: Void = flow_client .patch( @@ -289,39 +298,67 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp flow_client.set_auth(&ctx)?; search_client.set_auth(&ctx)?; sleep(Duration::from_millis(1000)).await; - let model_templates: TardisPage = search_client.put("/ci/item/search", &SearchItemSearchReq { - tag: "flow_model".to_string(), - ctx: SearchItemSearchCtxReq { - tenants: Some(vec![ctx.own_paths.clone(), "".to_string()]), - ..Default::default() - }, - query: SearchItemQueryReq { - ..Default::default() - }, - adv_query: None, - sort: None, - page: SearchItemSearchPageReq { - number: 1, - size: 20, - fetch_total: true, - } - }).await; + let model_templates: TardisPage = search_client + .put( + "/ci/item/search", + &SearchItemSearchReq { + tag: "flow_model".to_string(), + ctx: SearchItemSearchCtxReq { + tenants: Some(vec![ctx.own_paths.clone(), "".to_string()]), + ..Default::default() + }, + query: SearchItemQueryReq { ..Default::default() }, + adv_query: None, + sort: None, + page: SearchItemSearchPageReq { + number: 1, + size: 20, + fetch_total: true, + }, + }, + ) + .await; assert_eq!(model_templates.total_size, 1); assert_eq!(model_templates.records[0].key, req_default_model_template_id); + let copy_template_model: FlowModelAggResp = flow_client.patch(&format!("/cc/model/copy/{}", req_default_model_template_id.clone()), &json!({})).await; + sleep(Duration::from_millis(1000)).await; + let model_templates: TardisPage = search_client + .put( + "/ci/item/search", + &SearchItemSearchReq { + tag: "flow_model".to_string(), + ctx: SearchItemSearchCtxReq { + tenants: Some(vec![ctx.own_paths.clone(), "".to_string()]), + ..Default::default() + }, + query: SearchItemQueryReq { ..Default::default() }, + adv_query: None, + sort: None, + page: SearchItemSearchPageReq { + number: 1, + size: 20, + fetch_total: true, + }, + }, + ) + .await; + assert_eq!(model_templates.total_size, 2); + assert!(model_templates.records.iter().any(|record| record.key == req_default_model_template_id)); + assert!(model_templates.records.iter().any(|record| record.key == copy_template_model.id)); // project template bind flow model ctx.owner = "u001".to_string(); ctx.own_paths = "t1".to_string(); flow_client.set_auth(&ctx)?; search_client.set_auth(&ctx)?; - // + // let req_models: Vec = flow_client.get(&format!("/cc/model/find_by_rel_template_id?tag=REQ&template=true&rel_template_id={}", req_template_id1)).await; - assert_eq!(req_models.len(), 3); + assert_eq!(req_models.len(), 4); assert!(req_models.iter().any(|mdoel| mdoel.id == req_default_model_template_id)); assert!(req_models.iter().any(|mdoel| mdoel.id == req_model_template_id)); assert!(req_models.iter().all(|mdoel| mdoel.id != req_model_uninit_template_id)); let req_models: Vec = 
flow_client.get("/cc/model/find_by_rel_template_id?tag=REQ&template=true").await; - assert_eq!(req_models.len(), 2); + assert_eq!(req_models.len(), 3); assert!(req_models.iter().any(|mdoel| mdoel.id == req_default_model_template_id)); assert!(req_models.iter().all(|mdoel| mdoel.id != req_model_template_id)); ctx.owner = "u001".to_string(); @@ -329,7 +366,7 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp flow_client.set_auth(&ctx)?; search_client.set_auth(&ctx)?; let req_models: Vec = flow_client.get("/cc/model/find_by_rel_template_id?tag=REQ&template=true").await; - assert_eq!(req_models.len(), 2); + assert_eq!(req_models.len(), 3); assert!(req_models.iter().any(|mdoel| mdoel.id == req_default_model_template_id)); assert!(req_models.iter().all(|mdoel| mdoel.id != req_model_template_id)); Ok(()) diff --git a/backend/middlewares/flow/tests/test_flow_scenes_fsm2.rs b/backend/middlewares/flow/tests/test_flow_scenes_fsm2.rs index 35149dd42..f4bb14d20 100644 --- a/backend/middlewares/flow/tests/test_flow_scenes_fsm2.rs +++ b/backend/middlewares/flow/tests/test_flow_scenes_fsm2.rs @@ -14,16 +14,18 @@ use bios_mw_flow::dto::flow_state_dto::{FlowStateRelModelExt, FlowStateSummaryRe use bios_mw_flow::dto::flow_transition_dto::{FlowTransitionAddReq, FlowTransitionModifyReq}; use bios_sdk_invoke::clients::spi_kv_client::KvItemSummaryResp; -use bios_spi_search::dto::search_item_dto::{SearchItemQueryReq, SearchItemSearchCtxReq, SearchItemSearchPageReq, SearchItemSearchQScopeKind, SearchItemSearchReq, SearchItemSearchResp}; +use bios_spi_search::dto::search_item_dto::{ + SearchItemQueryReq, SearchItemSearchCtxReq, SearchItemSearchPageReq, SearchItemSearchQScopeKind, SearchItemSearchReq, SearchItemSearchResp, +}; use serde_json::json; use tardis::basic::dto::TardisContext; use std::time::Duration; use tardis::basic::result::TardisResult; use tardis::log::{debug, info}; +use tardis::tokio::time::sleep; use tardis::web::web_resp::{TardisPage, Void}; use tardis::TardisFuns; -use tardis::tokio::time::sleep; pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttpClient) -> TardisResult<()> { info!("【test_flow_scenes_fsm】"); @@ -116,20 +118,23 @@ pub async fn test(flow_client: &mut TestHttpClient, search_client: &mut TestHttp .await; sleep(Duration::from_millis(500)).await; let req_default_model_template_id = req_default_model_template_aggs.id.clone(); - let model_templates: TardisPage = search_client.put("/ci/item/search", &SearchItemSearchReq { - tag: "flow_model".to_string(), - ctx: SearchItemSearchCtxReq::default(), - query: SearchItemQueryReq { - ..Default::default() - }, - adv_query: None, - sort: None, - page: SearchItemSearchPageReq { - number: 1, - size: 20, - fetch_total: true, - } - }).await; + let model_templates: TardisPage = search_client + .put( + "/ci/item/search", + &SearchItemSearchReq { + tag: "flow_model".to_string(), + ctx: SearchItemSearchCtxReq::default(), + query: SearchItemQueryReq { ..Default::default() }, + adv_query: None, + sort: None, + page: SearchItemSearchPageReq { + number: 1, + size: 20, + fetch_total: true, + }, + }, + ) + .await; debug!("model_templates: {:?}", model_templates); assert_eq!(model_templates.total_size, 2); assert!(model_templates.records.iter().any(|record| record.key == req_default_model_template_id)); diff --git a/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/schedule_event.rs b/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/schedule_event.rs index 
fb2bb625f..0c99aaf4f 100644 --- a/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/schedule_event.rs +++ b/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/schedule_event.rs @@ -99,11 +99,12 @@ impl EventComponent for ScheduleEventCenter { .publish( LogItemAddReq { tag: TASK_TAG.to_string(), - content: message, + content: tardis::serde_json::Value::Null, ext: Some(ext), key: Some(code.to_string()), op: Some(OP_EXECUTE_END.to_string()), ts: Some(Utc::now()), + msg: Some(message), ..Default::default() } .inject_context(&funs, &ctx), diff --git a/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/spi_log.rs b/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/spi_log.rs index 0de7df0a5..e89883a6d 100644 --- a/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/spi_log.rs +++ b/backend/middlewares/schedule/src/serv/schedule_job_serv_v2/event/spi_log.rs @@ -35,7 +35,7 @@ impl EventComponent for SpiLog { let code = code.to_string(); let _handle = tokio::spawn(async move { let result = SpiLogClient::add( - &LogItemAddReq { + LogItemAddReq { tag: JOB_TAG.to_string(), content: "add job".into(), key: Some(code.to_string()), @@ -60,7 +60,7 @@ impl EventComponent for SpiLog { let code = code.to_string(); let _handle = tokio::spawn(async move { let result = SpiLogClient::add( - &LogItemAddReq { + LogItemAddReq { tag: JOB_TAG.to_string(), content: "delete job".into(), key: Some(code.to_string()), @@ -85,7 +85,7 @@ impl EventComponent for SpiLog { let code = code.to_string(); let _handle = tokio::spawn(async move { let result = SpiLogClient::add( - &LogItemAddReq { + LogItemAddReq { tag: TASK_TAG.to_string(), content: "start request".into(), key: Some(code.to_string()), @@ -110,13 +110,14 @@ impl EventComponent for SpiLog { let code = code.to_string(); let _handle = tokio::spawn(async move { let result = SpiLogClient::add( - &LogItemAddReq { + LogItemAddReq { tag: TASK_TAG.to_string(), - content: message, + content: tardis::serde_json::Value::Null, ext: Some(ext), key: Some(code.to_string()), op: Some(OP_EXECUTE_END.to_string()), ts: Some(Utc::now()), + msg: Some(message), ..Default::default() }, &funs, diff --git a/backend/spi/spi-conf/src/serv.rs b/backend/spi/spi-conf/src/serv.rs index fe9a43983..efe08244d 100644 --- a/backend/spi/spi-conf/src/serv.rs +++ b/backend/spi/spi-conf/src/serv.rs @@ -100,7 +100,6 @@ lazy_static::lazy_static! 
{ static ref MAP_CLEANER_TASK: OnceCell> = Default::default(); } - pub fn gen_md5(content: &str) -> String { use tardis::crypto::crypto_digest::TardisCryptoDigest; TardisCryptoDigest.md5(content).expect("md5 digest shouldn't fail") diff --git a/backend/spi/spi-conf/src/serv/pg/conf_pg_initializer.rs b/backend/spi/spi-conf/src/serv/pg/conf_pg_initializer.rs index 815e7850f..b8a21e5b3 100644 --- a/backend/spi/spi-conf/src/serv/pg/conf_pg_initializer.rs +++ b/backend/spi/spi-conf/src/serv/pg/conf_pg_initializer.rs @@ -21,6 +21,7 @@ pub async fn init_table_and_conn_namespace(bs_inst: TypedSpiBsInst<'_, TardisRel show_name character varying NOT NULL, description text, tp smallint NOT NULL DEFAULT 0"#, + None, vec![("show_name", "btree")], None, None, @@ -61,6 +62,7 @@ created_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, modified_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, tp character varying"# ), + None, vec![("data_id", "btree"), ("grp", "btree"), ("md5", "btree"), ("app_name", "btree")], None, Some("modified_time"), @@ -97,6 +99,7 @@ op_type character(1) NOT NULL DEFAULT 'I', config_tags text NOT NULL DEFAULT '', tp character varying"# ), + None, vec![("data_id", "btree"), ("grp", "btree"), ("md5", "btree"), ("app_name", "btree")], None, Some("modified_time"), @@ -105,7 +108,7 @@ tp character varying"# } pub async fn init_table_and_conn_tag(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, ctx: &TardisContext, mgr: bool) -> TardisResult<(TardisRelDBlConnection, String)> { - spi_initializer::common_pg::init_table_and_conn(bs_inst, ctx, mgr, None, "conf_tag", r#"id character varying PRIMARY KEY"#, vec![], None, None).await + spi_initializer::common_pg::init_table_and_conn(bs_inst, ctx, mgr, None, "conf_tag", r#"id character varying PRIMARY KEY"#, None, vec![], None, None).await } pub async fn init_table_and_conn_tag_config_rel( @@ -126,6 +129,7 @@ pub async fn init_table_and_conn_tag_config_rel( tag_id character varying NOT NULL REFERENCES {tag_table_name} ON DELETE CASCADE, config_id uuid NOT NULL REFERENCES {config_table_name} ON DELETE CASCADE"# ), + None, vec![], None, None, diff --git a/backend/spi/spi-conf/src/utils.rs b/backend/spi/spi-conf/src/utils.rs index cb3ae96aa..920f704b0 100644 --- a/backend/spi/spi-conf/src/utils.rs +++ b/backend/spi/spi-conf/src/utils.rs @@ -53,7 +53,6 @@ pub(crate) fn dot_env_parser(config: &str) -> HashMap { map } - #[cfg(test)] #[test] fn test_dot_env_parser() { @@ -68,5 +67,4 @@ URL=http://www.baidu.com assert_eq!(map.get("TYPE"), Some(&"ALPHA".to_string())); assert_eq!(map.get("VALUE"), Some(&"123".to_string())); assert_eq!(map.get("URL"), Some(&"http://www.baidu.com".to_string())); - -} \ No newline at end of file +} diff --git a/backend/spi/spi-graph/src/serv/pg/graph_pg_initializer.rs b/backend/spi/spi-graph/src/serv/pg/graph_pg_initializer.rs index 1e4a5493a..4dd6974cc 100644 --- a/backend/spi/spi-graph/src/serv/pg/graph_pg_initializer.rs +++ b/backend/spi/spi-graph/src/serv/pg/graph_pg_initializer.rs @@ -20,6 +20,7 @@ pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, ts timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, check (from_key <> to_key), unique (from_key, from_version, to_key, to_version, tag)"#, + None, vec![ ("tag", "btree"), ("from_key", "btree"), diff --git a/backend/spi/spi-kv/src/serv/pg/kv_pg_initializer.rs b/backend/spi/spi-kv/src/serv/pg/kv_pg_initializer.rs index 7c3dbc0ba..7ae77f047 100644 --- a/backend/spi/spi-kv/src/serv/pg/kv_pg_initializer.rs 
+++ b/backend/spi/spi-kv/src/serv/pg/kv_pg_initializer.rs @@ -20,6 +20,7 @@ pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, disable BOOLEAN NOT NULL, create_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, update_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP"#, + None, vec![("k", "btree"), ("v", "gin")], None, Some("update_time"), diff --git a/backend/spi/spi-log/Cargo.toml b/backend/spi/spi-log/Cargo.toml index 9c7c58f2f..726c6e580 100644 --- a/backend/spi/spi-log/Cargo.toml +++ b/backend/spi/spi-log/Cargo.toml @@ -20,13 +20,10 @@ spi-pg = ["tardis/reldb-postgres"] [dependencies] serde.workspace = true -tardis = { workspace = true, features = [ - "reldb-postgres", - "web-server", -] } +tardis = { workspace = true, features = ["reldb-postgres", "web-server"] } bios-basic = { path = "../../basic", features = ["default"] } bios-sdk-invoke = { path = "../../../frontend/sdks/invoke", features = [ - "event", + "event", ], default-features = false } [dev-dependencies] diff --git a/backend/spi/spi-log/src/api/ci/log_ci_item_api.rs b/backend/spi/spi-log/src/api/ci/log_ci_item_api.rs index 032988aa3..d31b90924 100644 --- a/backend/spi/spi-log/src/api/ci/log_ci_item_api.rs +++ b/backend/spi/spi-log/src/api/ci/log_ci_item_api.rs @@ -1,10 +1,12 @@ use tardis::web::context_extractor::TardisContextExtractor; use tardis::web::poem_openapi; +use tardis::web::poem_openapi::param::Path; use tardis::web::poem_openapi::payload::Json; -use tardis::web::web_resp::{TardisApiResult, TardisPage, TardisResp}; +use tardis::web::web_resp::{TardisApiResult, TardisPage, TardisResp, Void}; -use crate::dto::log_item_dto::{LogItemAddReq, LogItemFindReq, LogItemFindResp}; +use crate::dto::log_item_dto::{LogConfigReq, LogItemAddReq, LogItemFindReq, LogItemFindResp}; use crate::serv::log_item_serv; +use tardis::serde_json::Value; #[derive(Clone)] pub struct LogCiItemApi; @@ -27,4 +29,28 @@ impl LogCiItemApi { let resp = log_item_serv::find(&mut find_req.0, &funs, &ctx.0).await?; TardisResp::ok(resp) } + + /// Modify Item ext by key + #[oai(path = "/modify/:tag/:key/ext", method = "post")] + async fn modify_ext(&self, tag: Path, key: Path, mut ext: Json, ctx: TardisContextExtractor) -> TardisApiResult { + let funs = crate::get_tardis_inst(); + log_item_serv::modify_ext(&tag.0, &key.0, &mut ext.0, &funs, &ctx.0).await?; + TardisResp::ok(Void {}) + } + + /// Add config + #[oai(path = "/config", method = "post")] + async fn add_config(&self, mut find_req: Json, ctx: TardisContextExtractor) -> TardisApiResult { + let funs = crate::get_tardis_inst(); + log_item_serv::add_config(&mut find_req.0, &funs, &ctx.0).await?; + TardisResp::ok(Void {}) + } + + /// Delete config + #[oai(path = "/config", method = "delete")] + async fn delete_config(&self, mut find_req: Json, ctx: TardisContextExtractor) -> TardisApiResult { + let funs = crate::get_tardis_inst(); + log_item_serv::delete_config(&mut find_req.0, &funs, &ctx.0).await?; + TardisResp::ok(Void {}) + } } diff --git a/backend/spi/spi-log/src/dto/log_item_dto.rs b/backend/spi/spi-log/src/dto/log_item_dto.rs index d02f74278..824f377b3 100644 --- a/backend/spi/spi-log/src/dto/log_item_dto.rs +++ b/backend/spi/spi-log/src/dto/log_item_dto.rs @@ -7,12 +7,11 @@ use tardis::{ web::poem_openapi, }; -#[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] +#[derive(poem_openapi::Object, Serialize, Deserialize, Clone, Debug)] pub struct LogItemAddReq { #[oai(validator(pattern = r"^[a-z0-9_]+$"))] pub tag: String, - // 
#[oai(validator(min_length = "2"))] - pub content: String, + pub content: Value, #[oai(validator(min_length = "2"))] pub kind: Option, pub ext: Option, @@ -23,11 +22,15 @@ pub struct LogItemAddReq { #[oai(validator(min_length = "2"))] pub rel_key: Option, #[oai(validator(min_length = "2"))] - pub id: Option, + pub idempotent_id: Option, pub ts: Option>, #[oai(validator(min_length = "2"))] pub owner: Option, + #[oai(validator(min_length = "1"))] + pub owner_name: Option, pub own_paths: Option, + pub push: bool, + pub msg: Option, } impl From for LogItemAddReq { fn from(value: bios_sdk_invoke::clients::spi_log_client::LogItemAddReq) -> Self { @@ -39,10 +42,13 @@ impl From for LogItemAd key: value.key.map(Into::into), op: value.op, rel_key: value.rel_key.map(Into::into), - id: value.id, + idempotent_id: value.idempotent_id, ts: value.ts, owner: value.owner, own_paths: value.own_paths, + msg: value.msg, + owner_name: value.owner_name, + push: value.push, } } } @@ -86,15 +92,54 @@ pub struct AdvBasicQueryCondInfo { #[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] pub struct LogItemFindResp { - #[oai(validator(min_length = "2"))] - pub content: String, + pub content: Value, pub kind: String, pub ext: Value, pub owner: String, + pub owner_name: String, pub own_paths: String, pub id: String, pub key: String, pub op: String, pub rel_key: String, pub ts: DateTime, + pub msg: String, +} + +#[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] +pub struct LogConfigReq { + #[oai(validator(pattern = r"^[a-z0-9_]+$"))] + pub tag: String, + pub ref_field: String, +} + +#[derive(poem_openapi::Object, Serialize, Deserialize, Debug)] +pub struct StatsItemAddReq { + #[oai(validator(min_length = "2"))] + pub idempotent_id: Option, + #[oai(validator(pattern = r"^[a-z0-9_]+$"))] + pub tag: String, + pub content: Value, + pub ext: Option, + #[oai(validator(min_length = "2"))] + pub key: Option, + pub ts: Option>, + #[oai(validator(min_length = "2"))] + pub owner: Option, + pub own_paths: Option, +} + +impl From for StatsItemAddReq { + fn from(value: LogItemAddReq) -> Self { + StatsItemAddReq { + idempotent_id: value.idempotent_id, + tag: value.tag, + content: value.content, + ext: value.ext, + key: value.key, + ts: value.ts, + owner: value.owner, + own_paths: value.own_paths, + } + } } diff --git a/backend/spi/spi-log/src/event.rs b/backend/spi/spi-log/src/event.rs index 383f34895..ca93d1e8e 100644 --- a/backend/spi/spi-log/src/event.rs +++ b/backend/spi/spi-log/src/event.rs @@ -1,7 +1,10 @@ -use crate::{log_initializer::get_tardis_inst, serv}; +use crate::{dto::log_item_dto::StatsItemAddReq, log_initializer::get_tardis_inst, serv}; use bios_sdk_invoke::clients::{ - event_client::{get_topic, mq_error, ContextHandler, SPI_RPC_TOPIC}, - spi_log_client::{event::LOG_AVATAR, LogItemAddReq}, + event_client::{ + asteroid_mq::prelude::{EventAttribute, Subject}, + get_topic, mq_error, ContextHandler, SPI_RPC_TOPIC, + }, + spi_log_client::LogItemAddReq, }; use tardis::{ basic::{dto::TardisContext, result::TardisResult}, @@ -20,7 +23,9 @@ pub async fn handle_events() -> TardisResult<()> { if let Some(topic) = get_topic(&SPI_RPC_TOPIC) { topic.create_endpoint([Interest::new("log/*")]).await.map_err(mq_error)?.create_event_loop().with_handler(ContextHandler(handle_add_event)).spawn(); } - // let topic = get_topic(&SPI_RPC_TOPIC).expect("topic not initialized"); Ok(()) } +impl EventAttribute for StatsItemAddReq { + const SUBJECT: Subject = Subject::const_new("stats/add"); +} diff --git 
a/backend/spi/spi-log/src/log_constants.rs b/backend/spi/spi-log/src/log_constants.rs index 8e0a115e3..a3cb43cdf 100644 --- a/backend/spi/spi-log/src/log_constants.rs +++ b/backend/spi/spi-log/src/log_constants.rs @@ -1,3 +1,15 @@ pub const DOMAIN_CODE: &str = "spi-log"; pub const EVENT_ADD_LOG: &str = "spi-log/add"; + +//log表的flag +pub const TABLE_LOG_FLAG: &str = "log"; +//pg v2 spi kind code +pub const SPI_PG_V2_KIND_CODE: &str = "spi-bs-pg-v2"; +pub const TABLE_LOG_FLAG_V2: &str = "logv2"; +//父表表名 +pub const PARENT_TABLE_NAME: &str = "spi_log_parent"; +//配置表名 +pub const CONFIG_TABLE_NAME: &str = "spi_log_config"; +//ref flag __STARSYS_LOG_REF__@{ts}#{key} +pub const LOG_REF_FLAG: &str = "__STARSYS_LOG_REF__"; diff --git a/backend/spi/spi-log/src/log_initializer.rs b/backend/spi/spi-log/src/log_initializer.rs index 932141bb6..54d8e95f6 100644 --- a/backend/spi/spi-log/src/log_initializer.rs +++ b/backend/spi/spi-log/src/log_initializer.rs @@ -6,7 +6,11 @@ use tardis::{ TardisFuns, TardisFunsInst, }; -use crate::{api::ci::log_ci_item_api, log_config::LogConfig, log_constants::DOMAIN_CODE}; +use crate::{ + api::ci::log_ci_item_api, + log_config::LogConfig, + log_constants::{self, DOMAIN_CODE}, +}; pub async fn init(web_server: &TardisWebServer) -> TardisResult<()> { info!("[BIOS.Log] Module initializing"); @@ -24,6 +28,7 @@ pub async fn init(web_server: &TardisWebServer) -> TardisResult<()> { async fn init_db(funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> { spi_initializer::add_kind(spi_constants::SPI_PG_KIND_CODE, funs, ctx).await?; + spi_initializer::add_kind(log_constants::SPI_PG_V2_KIND_CODE, funs, ctx).await?; Ok(()) } @@ -37,6 +42,7 @@ pub async fn init_fun(bs_cert: SpiBsCertResp, ctx: &TardisContext, mgr: bool) -> let inst = match bs_cert.kind_code.as_str() { #[cfg(feature = "spi-pg")] spi_constants::SPI_PG_KIND_CODE => spi_initializer::common_pg::init(&bs_cert, ctx, mgr).await, + log_constants::SPI_PG_V2_KIND_CODE => spi_initializer::common_pg::init(&bs_cert, ctx, mgr).await, _ => Err(bs_cert.bs_not_implemented())?, }?; info!("[BIOS.Log] Fun [{}]({}) initialized", bs_cert.kind_code, bs_cert.conn_uri); diff --git a/backend/spi/spi-log/src/serv.rs b/backend/spi/spi-log/src/serv.rs index c256c1282..cd156a3d1 100644 --- a/backend/spi/spi-log/src/serv.rs +++ b/backend/spi/spi-log/src/serv.rs @@ -1,2 +1,4 @@ pub mod log_item_serv; +#[deprecated] pub mod pg; +pub mod pgv2; diff --git a/backend/spi/spi-log/src/serv/log_item_serv.rs b/backend/spi/spi-log/src/serv/log_item_serv.rs index c99f48ce0..f37836904 100644 --- a/backend/spi/spi-log/src/serv/log_item_serv.rs +++ b/backend/spi/spi-log/src/serv/log_item_serv.rs @@ -1,14 +1,17 @@ use tardis::basic::result::TardisResult; -use tardis::web::web_resp::TardisPage; use bios_basic::spi::spi_constants; use bios_basic::spi::spi_funs::SpiBsInstExtractor; use bios_basic::spi_dispatch_service; -use crate::dto::log_item_dto::{LogItemAddReq, LogItemFindReq, LogItemFindResp}; +use crate::dto::log_item_dto::{LogConfigReq, LogItemAddReq, LogItemFindReq, LogItemFindResp}; use crate::log_initializer; +use tardis::web::web_resp::TardisPage; +use super::super::log_constants; use super::pg; +use super::pgv2; +use tardis::serde_json::Value; spi_dispatch_service! { @mgr: true, @@ -16,9 +19,13 @@ spi_dispatch_service! 
{ @dispatch: { #[cfg(feature = "spi-pg")] spi_constants::SPI_PG_KIND_CODE => pg::log_pg_item_serv, + log_constants::SPI_PG_V2_KIND_CODE => pgv2::log_pg_item_serv, }, @method: { add(add_req: &mut LogItemAddReq) -> TardisResult; find(find_req: &mut LogItemFindReq) -> TardisResult>; + modify_ext(tag: &str,key: &str, ext: &mut Value) -> TardisResult<()>; + add_config(config: &mut LogConfigReq) -> TardisResult<()>; + delete_config(config: &mut LogConfigReq) -> TardisResult<()>; } } diff --git a/backend/spi/spi-log/src/serv/pg/log_pg_initializer.rs b/backend/spi/spi-log/src/serv/pg/log_pg_initializer.rs index d5b7fc8ba..e01ce5d78 100644 --- a/backend/spi/spi-log/src/serv/pg/log_pg_initializer.rs +++ b/backend/spi/spi-log/src/serv/pg/log_pg_initializer.rs @@ -11,7 +11,7 @@ pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, ctx, mgr, Some(tag), - "log", + crate::log_constants::TABLE_LOG_FLAG, r#"ts timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, id character varying NOT NULL, key character varying NOT NULL, @@ -22,6 +22,7 @@ pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, own_paths character varying NOT NULL, ext jsonb NOT NULL, rel_key character varying NOT NULL"#, + None, vec![ ("kind", "btree"), ("ts", "btree"), diff --git a/backend/spi/spi-log/src/serv/pg/log_pg_item_serv.rs b/backend/spi/spi-log/src/serv/pg/log_pg_item_serv.rs index a8f1d0f8e..4196b1cb1 100644 --- a/backend/spi/spi-log/src/serv/pg/log_pg_item_serv.rs +++ b/backend/spi/spi-log/src/serv/pg/log_pg_item_serv.rs @@ -1,24 +1,25 @@ use tardis::{ basic::{dto::TardisContext, result::TardisResult}, db::{reldb_client::TardisRelDBClient, sea_orm::Value}, + serde_json::Value as JsonValue, web::web_resp::TardisPage, TardisFuns, TardisFunsInst, }; use bios_basic::{dto::BasicQueryCondInfo, enumeration::BasicQueryOpKind, helper::db_helper, spi::spi_funs::SpiBsInst}; -use crate::dto::log_item_dto::{AdvBasicQueryCondInfo, LogItemAddReq, LogItemFindReq, LogItemFindResp}; +use crate::dto::log_item_dto::{AdvBasicQueryCondInfo, LogConfigReq, LogItemAddReq, LogItemFindReq, LogItemFindResp}; use super::log_pg_initializer; pub async fn add(add_req: &mut LogItemAddReq, _funs: &TardisFunsInst, ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult { - let id = add_req.id.clone().unwrap_or(TardisFuns::field.nanoid()); + let id = add_req.idempotent_id.clone().unwrap_or(TardisFuns::field.nanoid()); let mut params = vec![ Value::from(id.clone()), Value::from(add_req.kind.as_ref().unwrap_or(&"".into()).to_string()), Value::from(add_req.key.as_ref().unwrap_or(&"".into()).to_string()), Value::from(add_req.op.as_ref().unwrap_or(&"".to_string()).as_str()), - Value::from(add_req.content.as_str()), + Value::from(TardisFuns::json.json_to_string(add_req.content.clone())?.as_str()), Value::from(add_req.owner.as_ref().unwrap_or(&"".to_string()).as_str()), Value::from(add_req.own_paths.as_ref().unwrap_or(&"".to_string()).as_str()), Value::from(if let Some(ext) = &add_req.ext { @@ -481,17 +482,21 @@ ORDER BY ts DESC if total_size == 0 { total_size = item.try_get("", "total")?; } + let content: String = item.try_get("", "content")?; + let content = TardisFuns::json.str_to_json(&content)?; Ok(LogItemFindResp { ts: item.try_get("", "ts")?, id: item.try_get("", "id")?, key: item.try_get("", "key")?, op: item.try_get("", "op")?, ext: item.try_get("", "ext")?, - content: item.try_get("", "content")?, + content, rel_key: item.try_get("", "rel_key")?, kind: item.try_get("", "kind")?, owner: 
item.try_get("", "owner")?, own_paths: item.try_get("", "own_paths")?, + msg: String::new(), + owner_name: String::new(), }) }) .collect::>>()?; @@ -503,3 +508,15 @@ ORDER BY ts DESC records: result, }) } + +pub async fn modify_ext(_tag: &str, _key: &str, _ext: &mut JsonValue, _funs: &TardisFunsInst, _ctx: &TardisContext, _inst: &SpiBsInst) -> TardisResult<()> { + Ok(()) +} + +pub async fn add_config(_req: &LogConfigReq, _funs: &TardisFunsInst, _ctx: &TardisContext, _inst: &SpiBsInst) -> TardisResult<()> { + Ok(()) +} + +pub async fn delete_config(_config: &mut LogConfigReq, _funs: &TardisFunsInst, _ctx: &TardisContext, _inst: &SpiBsInst) -> TardisResult<()> { + Ok(()) +} diff --git a/backend/spi/spi-log/src/serv/pgv2.rs b/backend/spi/spi-log/src/serv/pgv2.rs new file mode 100644 index 000000000..e0fb6fd85 --- /dev/null +++ b/backend/spi/spi-log/src/serv/pgv2.rs @@ -0,0 +1,2 @@ +pub mod log_pg_initializer; +pub mod log_pg_item_serv; diff --git a/backend/spi/spi-log/src/serv/pgv2/log_pg_initializer.rs b/backend/spi/spi-log/src/serv/pgv2/log_pg_initializer.rs new file mode 100644 index 000000000..02b736c79 --- /dev/null +++ b/backend/spi/spi-log/src/serv/pgv2/log_pg_initializer.rs @@ -0,0 +1,96 @@ +use tardis::{ + basic::{dto::TardisContext, result::TardisResult}, + db::reldb_client::{TardisRelDBClient, TardisRelDBlConnection}, +}; + +use bios_basic::spi::{spi_funs::TypedSpiBsInst, spi_initializer}; + +use crate::log_constants::{self, CONFIG_TABLE_NAME}; + +pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, tag: &str, ctx: &TardisContext, mgr: bool) -> TardisResult<(TardisRelDBlConnection, String)> { + //添加父表 + let schema_name = spi_initializer::common_pg::get_schema_name_from_context(ctx); + bs_inst + .0 + .conn() + .execute_one( + &format!( + r#"CREATE TABLE IF NOT EXISTS {schema_name}.{}( + idempotent_id varchar NOT NULL, + ts timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, + key varchar NOT NULL, + kind varchar NOT NULL, + tag varchar NOT NULL, + op varchar NOT NULL, + content jsonb NOT NULL, + owner varchar NOT NULL, + owner_name varchar NOT NULL, + own_paths varchar NOT NULL, + push boolean NOT NULL DEFAULT false, + rel_key varchar NOT NULL, + ext jsonb NOT NULL, + disable boolean NOT NULL DEFAULT false, + msg varchar NOT NULL + );"#, + log_constants::PARENT_TABLE_NAME + ), + vec![], + ) + .await?; + + //添加配置表 + bs_inst + .0 + .conn() + .execute_one( + &format!( + r#"CREATE TABLE IF NOT EXISTS {schema_name}.{CONFIG_TABLE_NAME}( + table_name VARCHAR NOT NULL, + ref_field VARCHAR NOT NULL + );"# + ), + vec![], + ) + .await?; + + //添加配置表索引 + bs_inst + .0 + .conn() + .execute_one( + &format!( + r#" + CREATE INDEX IF NOT EXISTS {CONFIG_TABLE_NAME}_index1 ON {schema_name}.{CONFIG_TABLE_NAME} USING btree (table_name); + "# + ), + vec![], + ) + .await?; + + spi_initializer::common_pg::init_table_and_conn( + bs_inst, + ctx, + mgr, + Some(tag), + log_constants::TABLE_LOG_FLAG_V2, + "", + Some(format!("{schema_name}.{}", crate::log_constants::PARENT_TABLE_NAME)), + vec![ + ("kind", "btree"), + ("ts", "btree"), + ("key", "btree"), + ("content", "gin"), + ("ext", "gin"), + ("owner", "btree"), + ("own_paths", "btree"), + ("rel_key", "btree"), + ("idempotent_id", "btree"), + ("disable", "btree"), + ("tag", "btree"), + ("push", "btree"), + ], + None, + None, + ) + .await +} diff --git a/backend/spi/spi-log/src/serv/pgv2/log_pg_item_serv.rs b/backend/spi/spi-log/src/serv/pgv2/log_pg_item_serv.rs new file mode 100644 index 000000000..be8904c17 --- /dev/null +++ 
b/backend/spi/spi-log/src/serv/pgv2/log_pg_item_serv.rs @@ -0,0 +1,734 @@ +use std::{collections::HashMap, str::FromStr, vec}; + +use bios_sdk_invoke::clients::event_client::{get_topic, mq_error, EventAttributeExt as _, SPI_RPC_TOPIC}; +use tardis::{ + basic::{dto::TardisContext, error::TardisError, result::TardisResult}, + chrono::{DateTime, Utc}, + db::{ + reldb_client::{TardisRelDBClient, TardisRelDBlConnection}, + sea_orm::Value, + }, + futures::TryFutureExt as _, + serde_json::{self, Value as JsonValue}, + web::web_resp::TardisPage, + TardisFuns, TardisFunsInst, +}; + +use bios_basic::{ + dto::BasicQueryCondInfo, + enumeration::BasicQueryOpKind, + helper::db_helper, + spi::{spi_funs::SpiBsInst, spi_initializer::common_pg::get_schema_name_from_ext}, +}; + +use crate::{ + dto::log_item_dto::{AdvBasicQueryCondInfo, LogConfigReq, LogItemAddReq, LogItemFindReq, LogItemFindResp, StatsItemAddReq}, + log_constants::{CONFIG_TABLE_NAME, LOG_REF_FLAG, TABLE_LOG_FLAG_V2}, +}; + +use super::log_pg_initializer; + +pub async fn add(add_req: &mut LogItemAddReq, funs: &TardisFunsInst, ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult { + let id = add_req.idempotent_id.clone().unwrap_or(TardisFuns::field.nanoid()); + + let bs_inst = inst.inst::(); + let mut insert_content = add_req.content.clone(); + let (mut conn, table_name) = log_pg_initializer::init_table_and_conn(bs_inst, &add_req.tag, ctx, true).await?; + conn.begin().await?; + let ref_fields = get_ref_fields_by_table_name(&conn, &get_schema_name_from_ext(&inst.ext).expect("ignore"), &table_name).await?; + if let Some(key) = add_req.key.as_ref() { + let get_last_record = conn + .query_one( + &format!( + r#" + select ts,key,content from {table_name} where key = $1 order by ts desc limit 1 + "# + ), + vec![Value::from(key.to_string())], + ) + .await?; + + if let Some(last_record) = get_last_record { + let last_content: JsonValue = last_record.try_get("", "content")?; + let last_ts: DateTime = last_record.try_get("", "ts")?; + let last_key: String = last_record.try_get("", "key")?; + + insert_content = last_content; + for ref_field in &ref_fields { + if let Some(field_value) = insert_content.get_mut(ref_field) { + if !is_log_ref(field_value) { + *field_value = JsonValue::String(get_ref_filed_value(&last_ts, &last_key)); + } + } + } + + if let (Some(insert_content), Some(add_req_content)) = (insert_content.as_object_mut(), add_req.content.as_object()) { + for (k, v) in add_req_content { + insert_content.insert(k.to_string(), v.clone()); + } + } + } + } + + let mut params = vec![ + Value::from(id.clone()), + Value::from(add_req.kind.as_ref().unwrap_or(&"".into()).to_string()), + Value::from(add_req.key.as_ref().unwrap_or(&"".into()).to_string()), + Value::from(add_req.tag.clone()), + Value::from(add_req.op.as_ref().unwrap_or(&"".to_string()).as_str()), + Value::from(insert_content), + Value::from(add_req.owner.as_ref().unwrap_or(&"".to_string()).as_str()), + Value::from(add_req.owner_name.as_ref().unwrap_or(&"".to_string()).as_str()), + Value::from(add_req.own_paths.as_ref().unwrap_or(&"".to_string()).as_str()), + Value::from(add_req.push), + Value::from(if let Some(ext) = &add_req.ext { + ext.clone() + } else { + TardisFuns::json.str_to_json("{}")? 
+ }), + Value::from(add_req.rel_key.as_ref().unwrap_or(&"".into()).to_string()), + Value::from(add_req.msg.as_ref().unwrap_or(&"".into()).as_str()), + ]; + if let Some(ts) = add_req.ts { + params.push(Value::from(ts)); + } + conn.execute_one( + &format!( + r#"INSERT INTO {table_name} + (idempotent_id, kind, key, tag, op, content, owner, owner_name, own_paths, push, ext, rel_key, msg{}) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13{}) +"#, + if add_req.ts.is_some() { ", ts" } else { "" }, + if add_req.ts.is_some() { ", $14" } else { "" }, + ), + params, + ) + .await?; + conn.commit().await?; + //if push is true, then push to EDA + if add_req.push { + push_to_eda(&add_req, &ref_fields, funs, ctx).await?; + } + Ok(id) +} + +fn get_ref_filed_value(ref_log_record_ts: &DateTime, ref_log_record_key: &str) -> String { + let ref_log_record_ts = ref_log_record_ts.to_string(); + format!("{LOG_REF_FLAG}@{ref_log_record_ts}#{ref_log_record_key}") +} + +/// check if the value is referenced +/// true if the value is referenced +fn is_log_ref(value: &JsonValue) -> bool { + if let Some(value_str) = value.as_str() { + if value_str.starts_with(LOG_REF_FLAG) { + return true; + } + } + false +} + +fn parse_ref_ts_key(ref_key: &str) -> TardisResult<(DateTime, String)> { + let split_vec: Vec<&str> = ref_key.split("@").collect(); + if split_vec.len() != 2 { + return Err(TardisError::format_error(&format!("ref_key:{ref_key} format error"), "")); + } + let split_vec: Vec<&str> = split_vec[1].split("#").collect(); + if split_vec.len() != 2 { + return Err(TardisError::format_error(&format!("ref_key:{ref_key} format error"), "")); + } + Ok(( + DateTime::from_str(split_vec[0]).map_err(|e| TardisError::wrap(&format!("parse ts:{} error:{e}", split_vec[0]), ""))?, + split_vec[1].to_string(), + )) +} + +pub async fn find(find_req: &mut LogItemFindReq, funs: &TardisFunsInst, ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult> { + let mut where_fragments: Vec = Vec::new(); + let mut sql_vals: Vec = vec![]; + + if let Some(kinds) = &find_req.kinds { + let place_holder = kinds + .iter() + .map(|kind| { + sql_vals.push(Value::from(kind.to_string())); + format!("${}", sql_vals.len()) + }) + .collect::>() + .join(","); + where_fragments.push(format!("kind IN ({place_holder})")); + } + if let Some(owners) = &find_req.owners { + let place_holder = owners + .iter() + .map(|owner| { + sql_vals.push(Value::from(owner.to_string())); + format!("${}", sql_vals.len()) + }) + .collect::>() + .join(","); + where_fragments.push(format!("owner IN ({place_holder})")); + } + if let Some(keys) = &find_req.keys { + let place_holder = keys + .iter() + .map(|key| { + sql_vals.push(Value::from(key.to_string())); + format!("${}", sql_vals.len()) + }) + .collect::>() + .join(","); + where_fragments.push(format!("key IN ({place_holder})")); + } + if let Some(ops) = &find_req.ops { + let place_holder = ops + .iter() + .map(|op| { + sql_vals.push(Value::from(op.as_str())); + format!("${}", sql_vals.len()) + }) + .collect::>() + .join(","); + where_fragments.push(format!("op IN ({place_holder})")); + } + if let Some(rel_keys) = &find_req.rel_keys { + let place_holder = rel_keys + .iter() + .map(|rel_key| { + sql_vals.push(Value::from(rel_key.to_string())); + format!("${}", sql_vals.len()) + }) + .collect::>() + .join(","); + where_fragments.push(format!("rel_key IN ({place_holder})")); + } + if let Some(own_paths) = &find_req.own_paths { + sql_vals.push(Value::from(format!("{}%", own_paths))); + 
where_fragments.push(format!("own_paths like ${}", sql_vals.len())); + } + if let Some(ts_start) = find_req.ts_start { + sql_vals.push(Value::from(ts_start)); + where_fragments.push(format!("ts >= ${}", sql_vals.len())); + } + if let Some(ts_end) = find_req.ts_end { + sql_vals.push(Value::from(ts_end)); + where_fragments.push(format!("ts <= ${}", sql_vals.len())); + } + let err_notfound = |ext: &BasicQueryCondInfo| { + Err(funs.err().not_found( + "item", + "log", + &format!("The ext field=[{}] value=[{}] operation=[{}] is not legal.", &ext.field, ext.value, &ext.op,), + "404-spi-log-op-not-legal", + )) + }; + let err_op_in_without_value = || Err(funs.err().bad_request("item", "log", "Request item using 'IN' operator show hava a value", "400-spi-item-op-in-without-value")); + if let Some(ext) = &find_req.ext { + for ext_item in ext { + let value = db_helper::json_to_sea_orm_value(&ext_item.value, &ext_item.op); + let Some(mut value) = value else { + return err_notfound(ext_item); + }; + if ext_item.op == BasicQueryOpKind::In { + if value.len() == 1 { + where_fragments.push(format!("ext -> '{}' ? ${}", ext_item.field, sql_vals.len() + 1)); + } else { + where_fragments.push(format!( + "ext -> '{}' ?| array[{}]", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(", ") + )); + } + for val in value { + sql_vals.push(val); + } + } else if ext_item.op == BasicQueryOpKind::NotIn { + let value = value.clone(); + if value.len() == 1 { + where_fragments.push(format!("not (ext -> '{}' ? ${})", ext_item.field, sql_vals.len() + 1)); + } else { + where_fragments.push(format!( + "not (ext -> '{}' ?| array[{}])", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(", ") + )); + } + for val in value { + sql_vals.push(val); + } + } else if ext_item.op == BasicQueryOpKind::IsNull { + where_fragments.push(format!("ext ->> '{}' is null", ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNotNull { + where_fragments.push(format!("(ext ->> '{}' is not null or ext ->> '{}' != '')", ext_item.field, ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNullOrEmpty { + where_fragments.push(format!("(ext ->> '{}' is null or ext ->> '{}' = '')", ext_item.field, ext_item.field)); + } else { + if value.len() > 1 { + return err_notfound(ext_item); + } + let Some(value) = value.pop() else { + return err_op_in_without_value(); + }; + if let Value::Bool(_) = value { + where_fragments.push(format!("(ext ->> '{}')::boolean {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyInt(_) = value { + where_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallInt(_) = value { + where_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Int(_) = value { + where_fragments.push(format!("(ext ->> '{}')::integer {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigInt(_) = value { + where_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyUnsigned(_) = value { + where_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallUnsigned(_) = value { + 
where_fragments.push(format!("(ext ->> '{}')::integer {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Unsigned(_) = value { + where_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigUnsigned(_) = value { + // TODO + where_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Float(_) = value { + where_fragments.push(format!("(ext ->> '{}')::real {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Double(_) = value { + where_fragments.push(format!("(ext ->> '{}')::double precision {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else { + where_fragments.push(format!("ext ->> '{}' {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } + sql_vals.push(value); + } + } + } + if let Some(ext_or) = &find_req.ext_or { + let mut or_fragments = vec![]; + for ext_or_item in ext_or { + let value = db_helper::json_to_sea_orm_value(&ext_or_item.value, &ext_or_item.op); + + let Some(mut value) = value else { + return err_notfound(ext_or_item); + }; + if ext_or_item.op == BasicQueryOpKind::In { + if value.len() == 1 { + or_fragments.push(format!("ext -> '{}' ? ${}", ext_or_item.field, sql_vals.len() + 1)); + } else { + or_fragments.push(format!( + "ext -> '{}' ?| array[{}]", + ext_or_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(", ") + )); + } + for val in value { + sql_vals.push(val); + } + } else { + if value.len() > 1 { + return err_notfound(ext_or_item); + } + let Some(value) = value.pop() else { + return err_op_in_without_value(); + }; + if let Value::Bool(_) = value { + or_fragments.push(format!("(ext ->> '{}')::boolean {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyInt(_) = value { + or_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallInt(_) = value { + or_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Int(_) = value { + or_fragments.push(format!("(ext ->> '{}')::integer {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigInt(_) = value { + or_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyUnsigned(_) = value { + or_fragments.push(format!("(ext ->> '{}')::smallint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallUnsigned(_) = value { + or_fragments.push(format!("(ext ->> '{}')::integer {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Unsigned(_) = value { + or_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigUnsigned(_) = value { + // TODO + or_fragments.push(format!("(ext ->> '{}')::bigint {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Float(_) = value { + or_fragments.push(format!("(ext ->> '{}')::real {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } else if let 
Value::Double(_) = value { + or_fragments.push(format!( + "(ext ->> '{}')::double precision {} ${}", + ext_or_item.field, + ext_or_item.op.to_sql(), + sql_vals.len() + 1 + )); + } else { + or_fragments.push(format!("ext ->> '{}' {} ${}", ext_or_item.field, ext_or_item.op.to_sql(), sql_vals.len() + 1)); + } + sql_vals.push(value); + } + } + where_fragments.push(format!(" ( {} ) ", or_fragments.join(" OR "))); + } + + // advanced query + let mut sql_adv_query = vec![]; + if let Some(adv_query) = &find_req.adv_query { + for group_query in adv_query { + let mut sql_and_where = vec![]; + let err_not_found = |ext_item: &AdvBasicQueryCondInfo| { + Err(funs.err().not_found( + "item", + "search", + &format!("The ext field=[{}] value=[{}] operation=[{}] is not legal.", &ext_item.field, ext_item.value, &ext_item.op,), + "404-spi-search-op-not-legal", + )) + }; + if let Some(ext) = &group_query.ext { + for ext_item in ext { + let value = db_helper::json_to_sea_orm_value(&ext_item.value, &ext_item.op); + let Some(mut value) = value else { return err_not_found(ext_item) }; + if ext_item.in_ext.unwrap_or(true) { + if ext_item.op == BasicQueryOpKind::In { + if value.len() == 1 { + sql_and_where.push(format!("ext -> '{}' ? ${}", ext_item.field, sql_vals.len() + 1)); + } else { + sql_and_where.push(format!( + "ext -> '{}' ?| array[{}]", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(", ") + )); + } + for val in value { + sql_vals.push(val); + } + } else if ext_item.op == BasicQueryOpKind::NotIn { + let value = value.clone(); + if value.len() == 1 { + sql_and_where.push(format!("not (ext -> '{}' ? ${})", ext_item.field, sql_vals.len() + 1)); + } else { + sql_and_where.push(format!( + "not (ext -> '{}' ?| array[{}])", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(", ") + )); + } + for val in value { + sql_vals.push(val); + } + } else if ext_item.op == BasicQueryOpKind::IsNull { + sql_and_where.push(format!("ext ->> '{}' is null", ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNotNull { + sql_and_where.push(format!("ext ->> '{}' is not null", ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNullOrEmpty { + sql_and_where.push(format!("(ext ->> '{}' is null or ext ->> '{}' = '' or (jsonb_typeof(ext -> '{}') = 'array' and (jsonb_array_length(ext-> '{}') is null or jsonb_array_length(ext-> '{}') = 0)))", ext_item.field, ext_item.field, ext_item.field, ext_item.field, ext_item.field)); + } else { + if value.len() > 1 { + return err_not_found(ext_item); + } + let Some(value) = value.pop() else { + return Err(funs.err().bad_request("item", "search", "Request item using 'IN' operator show hava a value", "400-spi-item-op-in-without-value")); + }; + if let Value::Bool(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::boolean {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyInt(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallInt(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Int(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::integer {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigInt(_) = value { + sql_and_where.push(format!("(ext 
->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyUnsigned(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::smallint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallUnsigned(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::integer {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Unsigned(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigUnsigned(_) = value { + // TODO + sql_and_where.push(format!("(ext ->> '{}')::bigint {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Float(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::real {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Double(_) = value { + sql_and_where.push(format!("(ext ->> '{}')::double precision {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if value.is_chrono_date_time_utc() { + sql_and_where.push(format!( + "(ext ->> '{}')::timestamp with time zone {} ${}", + ext_item.field, + ext_item.op.to_sql(), + sql_vals.len() + 1 + )); + } else { + sql_and_where.push(format!("ext ->> '{}' {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } + sql_vals.push(value); + } + } else if ext_item.op == BasicQueryOpKind::In { + if !value.is_empty() { + sql_and_where.push(format!( + "{} IN ({})", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(",") + )); + for val in value { + sql_vals.push(val); + } + } + } else if ext_item.op == BasicQueryOpKind::NotIn { + if !value.is_empty() { + sql_and_where.push(format!( + "{} NOT IN ({})", + ext_item.field, + (0..value.len()).map(|idx| format!("${}", sql_vals.len() + idx + 1)).collect::>().join(",") + )); + for val in value { + sql_vals.push(val); + } + } + } else if ext_item.op == BasicQueryOpKind::IsNull { + sql_and_where.push(format!("{} is null", ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNotNull { + sql_and_where.push(format!("({} is not null or {} != '' )", ext_item.field, ext_item.field)); + } else if ext_item.op == BasicQueryOpKind::IsNullOrEmpty { + sql_and_where.push(format!("({} is null or {} = '' )", ext_item.field, ext_item.field)); + } else { + if value.len() > 1 { + return err_not_found(ext_item); + } + let Some(value) = value.pop() else { + return Err(funs.err().bad_request("item", "search", "Request item using 'IN' operator show hava a value", "400-spi-item-op-in-without-value")); + }; + if let Value::Bool(_) = value { + sql_and_where.push(format!("({}::boolean) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyInt(_) = value { + sql_and_where.push(format!("({}::smallint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallInt(_) = value { + sql_and_where.push(format!("({}::smallint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Int(_) = value { + sql_and_where.push(format!("({}::integer) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigInt(_) = value { + sql_and_where.push(format!("({}::bigint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::TinyUnsigned(_) = 
value { + sql_and_where.push(format!("({}::smallint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::SmallUnsigned(_) = value { + sql_and_where.push(format!("({}::integer) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Unsigned(_) = value { + sql_and_where.push(format!("({}::bigint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::BigUnsigned(_) = value { + // TODO + sql_and_where.push(format!("({}::bigint) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Float(_) = value { + sql_and_where.push(format!("({}::real) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else if let Value::Double(_) = value { + sql_and_where.push(format!("({}::double precision) {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } else { + sql_and_where.push(format!("{} {} ${}", ext_item.field, ext_item.op.to_sql(), sql_vals.len() + 1)); + } + sql_vals.push(value); + } + } + } + if !sql_and_where.is_empty() { + sql_adv_query.push(format!( + " {} ( {} )", + if group_query.group_by_or.unwrap_or(false) { "OR" } else { "AND" }, + sql_and_where.join(" AND ") + )); + } + } + } + if where_fragments.is_empty() { + where_fragments.push("1 = 1".to_string()); + } + + sql_vals.push(Value::from(find_req.page_size)); + sql_vals.push(Value::from((find_req.page_number - 1) * find_req.page_size as u32)); + let page_fragments = format!("LIMIT ${} OFFSET ${}", sql_vals.len() - 1, sql_vals.len()); + + let bs_inst = inst.inst::(); + let (conn, table_name) = log_pg_initializer::init_table_and_conn(bs_inst, &find_req.tag, ctx, false).await?; + let result = conn + .query_all( + format!( + r#"SELECT ts, idempotent_id, key, op, content, kind, ext, owner, owner_name, own_paths, rel_key, msg, count(*) OVER() AS total +FROM {table_name} +WHERE + {} + {} +ORDER BY ts DESC +{}"#, + where_fragments.join(" AND "), + if sql_adv_query.is_empty() { + "".to_string() + } else { + format!(" AND ( 1=1 {})", sql_adv_query.join(" ")) + }, + page_fragments + ) + .as_str(), + sql_vals, + ) + .await?; + + let mut total_size: i64 = 0; + + let mut result = result + .into_iter() + .map(|item| { + if total_size == 0 { + total_size = item.try_get("", "total")?; + } + Ok(LogItemFindResp { + ts: item.try_get("", "ts")?, + id: item.try_get("", "idempotent_id")?, + key: item.try_get("", "key")?, + op: item.try_get("", "op")?, + ext: item.try_get("", "ext")?, + content: item.try_get("", "content")?, + rel_key: item.try_get("", "rel_key")?, + kind: item.try_get("", "kind")?, + owner: item.try_get("", "owner")?, + own_paths: item.try_get("", "own_paths")?, + msg: item.try_get("", "msg")?, + owner_name: item.try_get("", "owner_name")?, + }) + }) + .collect::>>()?; + + // Stores the mapping relationship between ref_value and true_value + let mut cache_value = HashMap::::new(); + + for log in &mut result { + if let Some(json_map) = log.content.as_object_mut() { + for (k, v) in json_map { + if is_log_ref(v) { + if let Some(v_str) = v.as_str() { + let ref_string = v_str.to_string(); + if let Some(true_content) = cache_value.get(&ref_string) { + if let Some(true_value) = true_content.get(k) { + *v = true_value.clone(); + } + } else { + let (ts, key) = parse_ref_ts_key(v_str)?; + if let Some(query_result) = + conn.query_one(&format!("select content from {table_name} where ts=$1 and key=$2"), vec![Value::from(ts), Value::from(key)]).await? 
+ { + let query_content: JsonValue = query_result.try_get("", "content")?; + if let Some(query_true_value) = query_content.get(k) { + *v = query_true_value.clone(); + } + cache_value.insert(ref_string, query_content); + } + } + } + } + } + } + } + + Ok(TardisPage { + page_size: find_req.page_size as u64, + page_number: find_req.page_number as u64, + total_size: total_size as u64, + records: result, + }) +} + +pub async fn modify_ext(tag: &str, key: &str, ext: &mut JsonValue, _funs: &TardisFunsInst, ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult<()> { + let bs_inst = inst.inst::(); + let (conn, table_name) = log_pg_initializer::init_table_and_conn(bs_inst, tag, ctx, false).await?; + + let ext_str: String = serde_json::to_string(ext).map_err(|e| TardisError::internal_error(&format!("Fail to parse ext: {e}"), "500-spi-log-internal-error"))?; + conn.execute_one(&format!("update {table_name} set ext=ext||$1 where key=$2"), vec![Value::from(ext_str), Value::from(key)]).await?; + Ok(()) +} + +pub async fn add_config(req: &LogConfigReq, _funs: &TardisFunsInst, _ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult<()> { + let table_full_name = bios_basic::spi::spi_initializer::common_pg::get_table_full_name(&inst.ext, TABLE_LOG_FLAG_V2.to_string(), req.tag.clone()); + let schema_name = get_schema_name_from_ext(&inst.ext).expect("ignore"); + let bs_inst = inst.inst::(); + if bs_inst + .0 + .conn() + .query_one( + &format!("select table_name,ref_field from {schema_name}.{CONFIG_TABLE_NAME} where table_name = $1 and ref_field = $2"), + vec![Value::from(table_full_name.clone()), Value::from(req.ref_field.clone())], + ) + .await? + .is_some() + { + Ok(()) + } else { + //新增记录 + bs_inst + .0 + .conn() + .execute_one( + &format!("insert into {schema_name}.{CONFIG_TABLE_NAME}(table_name,ref_field) VALUES ($1,$2)"), + vec![Value::from(table_full_name), Value::from(req.ref_field.clone())], + ) + .await?; + Ok(()) + } +} + +pub async fn delete_config(config: &mut LogConfigReq, _funs: &TardisFunsInst, _ctx: &TardisContext, inst: &SpiBsInst) -> TardisResult<()> { + let table_full_name = bios_basic::spi::spi_initializer::common_pg::get_table_full_name(&inst.ext, TABLE_LOG_FLAG_V2.to_string(), config.tag.clone()); + let schema_name = get_schema_name_from_ext(&inst.ext).expect("ignore"); + let bs_inst = inst.inst::(); + bs_inst + .0 + .conn() + .execute_one( + &format!("delete from {schema_name}.{CONFIG_TABLE_NAME} where table_name = $1 and ref_field = $2"), + vec![Value::from(table_full_name), Value::from(config.ref_field.clone())], + ) + .await?; + Ok(()) +} + +async fn get_ref_fields_by_table_name(conn: &TardisRelDBlConnection, schema_name: &str, table_full_name: &str) -> TardisResult> { + let query_results = conn + .query_all( + &format!("select ref_field from {schema_name}.{CONFIG_TABLE_NAME} where table_name = $1"), + vec![Value::from(table_full_name)], + ) + .await?; + + let mut ref_fields = Vec::new(); + for row in query_results { + let ref_field: String = row.try_get("", "ref_field")?; + ref_fields.push(ref_field); + } + + Ok(ref_fields) +} + +async fn push_to_eda(req: &LogItemAddReq, ref_fields: &Vec, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> { + if let Some(topic) = get_topic(&SPI_RPC_TOPIC) { + let mut req_clone = req.clone(); + for ref_field in ref_fields { + if let Some(content) = req_clone.content.as_object_mut() { + content.remove(ref_field); + } + } + let stats_add: StatsItemAddReq = req_clone.into(); + topic.send_event(stats_add.inject_context(funs, 
ctx).json()).map_err(mq_error).await?; + } + Ok(()) +} + +#[cfg(test)] +mod test { + use tardis::{chrono::Utc, serde_json::Value}; + + use crate::serv::pgv2::log_pg_item_serv::{is_log_ref, parse_ref_ts_key}; + + use super::get_ref_filed_value; + + #[test] + fn test_ref_value() { + let ts = Utc::now(); + let key = "test-key".to_owned(); + let ref_value = get_ref_filed_value(&ts, &key); + + assert!(is_log_ref(&Value::String(ref_value.clone()))); + assert!(!is_log_ref(&Value::String(key.to_string()))); + assert_eq!(parse_ref_ts_key(&ref_value).unwrap(), (ts, key)); + } +} diff --git a/backend/spi/spi-log/tests/test_log.rs b/backend/spi/spi-log/tests/test_log.rs index 2e8007be6..0e581d90b 100644 --- a/backend/spi/spi-log/tests/test_log.rs +++ b/backend/spi/spi-log/tests/test_log.rs @@ -6,7 +6,7 @@ use bios_basic::spi::dto::spi_bs_dto::SpiBsAddReq; use bios_basic::spi::spi_constants; use bios_basic::test::init_test_container; use bios_basic::test::test_http_client::TestHttpClient; -use bios_spi_log::log_constants::DOMAIN_CODE; +use bios_spi_log::log_constants::{DOMAIN_CODE, SPI_PG_V2_KIND_CODE}; use bios_spi_log::log_initializer; use tardis::basic::dto::TardisContext; use tardis::basic::field::TrimString; @@ -41,6 +41,7 @@ async fn init_data() -> TardisResult<()> { let funs = TardisFuns::inst_with_db_conn(DOMAIN_CODE.to_string(), None); let kind_id = RbumKindServ::get_rbum_kind_id_by_code(spi_constants::SPI_PG_KIND_CODE, &funs).await?.unwrap(); + let kind_v2_id = RbumKindServ::get_rbum_kind_id_by_code(SPI_PG_V2_KIND_CODE, &funs).await?.unwrap(); let ctx = TardisContext { own_paths: "".to_string(), ak: "".to_string(), @@ -70,9 +71,28 @@ async fn init_data() -> TardisResult<()> { ) .await; - let _: Void = client.put(&format!("/ci/manage/bs/{}/rel/app001", bs_id), &Void {}).await; + let bs_v2_id: String = client + .post( + "/ci/manage/bs", + &SpiBsAddReq { + name: TrimString("test-spi".to_string()), + kind_id: TrimString(kind_v2_id), + conn_uri: env::var("TARDIS_FW.DB.URL").unwrap(), + ak: TrimString("".to_string()), + sk: TrimString("".to_string()), + ext: "{\"max_connections\":20,\"min_connections\":10}".to_string(), + private: false, + disabled: None, + }, + ) + .await; - test_log_item::test(&mut client).await?; + let app001 = "app001"; + let app002 = "app002"; + let _: Void = client.put(&format!("/ci/manage/bs/{}/rel/{}", bs_id, app001), &Void {}).await; + let _: Void = client.put(&format!("/ci/manage/bs/{}/rel/{}", bs_v2_id, app002), &Void {}).await; + test_log_item::test(app001, &mut client).await?; + test_log_item::test(app002, &mut client).await?; Ok(()) } diff --git a/backend/spi/spi-log/tests/test_log_item.rs b/backend/spi/spi-log/tests/test_log_item.rs index a597b7fae..61151b1bf 100644 --- a/backend/spi/spi-log/tests/test_log_item.rs +++ b/backend/spi/spi-log/tests/test_log_item.rs @@ -5,13 +5,13 @@ use tardis::basic::result::TardisResult; use tardis::serde_json::json; use tardis::web::web_resp::{TardisPage, TardisResp, Void}; -pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { +pub async fn test(app: &str, client: &mut TestHttpClient) -> TardisResult<()> { client.set_auth(&TardisContext { - own_paths: "t1/app001".to_string(), - ak: "".to_string(), + own_paths: format!("t1/{}", app), + ak: app.to_string(), roles: vec![], groups: vec![], - owner: "app001".to_string(), + owner: app.to_string(), ..Default::default() })?; @@ -20,7 +20,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { "/ci/item", &json!({ "tag":"audit", - "content": 
r#"账号[xxxx]登录系统"#, + "content": "账号[xxxx]登录系统", "op":"login" }), ) @@ -32,7 +32,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { &json!({ "tag":"feed", "key": "001", - "content": r#"{"content":"在任意信息流(FEED,包含需求、任务、缺陷、文档等)中输入#号时出现一个跟随光标的快捷搜索小窗口,可以输入编号或内容模糊匹配对应的数据,如果存在,则可以选中对应的数据并显示在文本中。","title":"全局#号搜索","kind":"req","assign_to":"account002"}"#, + "content": {"content":"在任意信息流(FEED,包含需求、任务、缺陷、文档等)中输入#号时出现一个跟随光标的快捷搜索小窗口,可以输入编号或内容模糊匹配对应的数据,如果存在,则可以选中对应的数据并显示在文本中。","title":"全局#号搜索","kind":"req","assign_to":"account002"}, "op":"init", "ts":"2022-09-26T23:23:59.000Z", "rel_key":"app001" @@ -40,13 +40,34 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { ) .await; + // add ref_field config + let _: Void = client + .post( + "/ci/item/config", + &json!({ + "tag":"feed", + "ref_field": "content" + }), + ) + .await; + + let _: Void = client + .post( + "/ci/item/config", + &json!({ + "tag":"feed", + "ref_field": "title" + }), + ) + .await; + let _: Void = client .post( "/ci/item", &json!({ "tag":"feed", "key": "001", - "content": r#"{"assign_to":"account004"}"#, + "content": {"assign_to":"account004"}, "op":"modify", "ts":"2022-09-27T23:23:59.000Z", "rel_key":"app001" @@ -54,13 +75,32 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { ) .await; + let _: Void = client + .post( + "/ci/item", + &json!({ + "tag":"feed", + "key": "001", + "content": {"content":"在项目管理系统中,当用户点击任务卡片时,弹出一个详细信息窗口,显示任务的优先级、截止日期、负责人等关键信息。用户可以直接在窗口中编辑这些字段,系统会实时保存更改并通知相关人员。"}, + "op":"modify", + "ts":"2022-09-28T23:23:59.000Z", + "rel_key":"app001" + }), + ) + .await; + let _: Void = client .post( "/ci/item", &json!({ "tag":"feed", "key": "002", - "content": r#"{"content":"账号登录 登录名:默认提示:用户名/手机号/邮箱,输入类型不限,最多输入30个字 密码:默认提示:密码,输入类型不限,最多输入30个字; 登录:1、点击判断用户名和密码是否已填写,如果没有则在每个必填项下提示...","title":"新增全局账号逻辑","kind":"req","assign_to":"account002"}"#, + "content": { + "content":"账号登录 登录名:默认提示:用户名/手机号/邮箱,输入类型不限,最多输入30个字 密码:默认提示:密码,输入类型不限,最多输入30个字; 登录:1、点击判断用户名和密码是否已填写,如果没有则在每个必填项下提示...", + "title":"新增全局账号逻辑", + "kind":"req", + "assign_to":"account002" + }, "op":"init", "ts":"2022-09-26T23:23:50.000Z", "rel_key":"app002" @@ -75,7 +115,12 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { "tag":"project", "kind":"req", "key": "001", - "content": r#"{"content":"账号登录 登录名:默认提示:用户名/手机号/邮箱,输入类型不限,最多输入30个字 密码:默认提示:密码,输入类型不限,最多输入30个字; 登录:1、点击判断用户名和密码是否已填写,如果没有则在每个必填项下提示...","title":"新增全局账号逻辑","kind":"req","assign_to":"account002"}"#, + "content": { + "content":"账号登录 登录名:默认提示:用户名/手机号/邮箱,输入类型不限,最多输入30个字 密码:默认提示:密码,输入类型不限,最多输入30个字; 登录:1、点击判断用户名和密码是否已填写,如果没有则在每个必填项下提示...", + "title":"新增全局账号逻辑", + "kind":"req", + "assign_to":"account002" + }, "ext": {"name":"测试","status":1,"apps":["app01"],"assign_to":"account002"}, "owner":"account002", "own_paths":"tenant001", @@ -108,7 +153,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { }), ) .await; - assert_eq!(find_result.total_size, 3); + assert_eq!(find_result.total_size, 4); assert_eq!(find_result.records[0].key, "001"); assert_eq!(find_result.records[0].op, "modify"); @@ -123,7 +168,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { }), ) .await; - assert_eq!(find_result.total_size, 2); + assert_eq!(find_result.total_size, 3); assert_eq!(find_result.records[0].key, "001"); assert_eq!(find_result.records[0].op, "modify"); @@ -154,7 +199,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { }), ) .await; - assert_eq!(find_result.total_size, 2); + 
assert_eq!(find_result.total_size, 3); assert_eq!(find_result.records[0].key, "001"); assert_eq!(find_result.records[0].op, "modify"); @@ -187,7 +232,7 @@ pub async fn test(client: &mut TestHttpClient) -> TardisResult<()> { }), ) .await; - assert_eq!(find_result.total_size, 2); + assert_eq!(find_result.total_size, 3); assert_eq!(find_result.records[0].key, "001"); assert_eq!(find_result.records[0].op, "modify"); diff --git a/backend/spi/spi-search/src/dto/search_item_dto.rs b/backend/spi/spi-search/src/dto/search_item_dto.rs index b732cfdd7..7c5098050 100644 --- a/backend/spi/spi-search/src/dto/search_item_dto.rs +++ b/backend/spi/spi-search/src/dto/search_item_dto.rs @@ -25,7 +25,7 @@ pub struct SearchItemAddReq { pub content: String, #[oai(validator(min_length = "2"))] pub owner: Option, - #[oai(validator(min_length = "2"))] + // #[oai(validator(min_length = "2"))] pub own_paths: Option, pub create_time: Option>, pub update_time: Option>, diff --git a/backend/spi/spi-search/src/serv/pg/search_pg_initializer.rs b/backend/spi/spi-search/src/serv/pg/search_pg_initializer.rs index e10e45aab..5c7340254 100644 --- a/backend/spi/spi-search/src/serv/pg/search_pg_initializer.rs +++ b/backend/spi/spi-search/src/serv/pg/search_pg_initializer.rs @@ -23,6 +23,7 @@ pub async fn init_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelDBClient>, update_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, ext jsonb NOT NULL, visit_keys jsonb"#, + None, vec![ ("kind", "btree"), ("key", "btree"), diff --git a/backend/spi/spi-stats/Cargo.toml b/backend/spi/spi-stats/Cargo.toml index cad3aee71..bdcd0f1ea 100644 --- a/backend/spi/spi-stats/Cargo.toml +++ b/backend/spi/spi-stats/Cargo.toml @@ -26,6 +26,9 @@ itertools.workspace = true tardis = { workspace = true, features = ["reldb-postgres", "web-server"] } serde_json = { workspace = true, features = ["preserve_order"] } strum = { workspace = true, features = ["derive"] } +bios-sdk-invoke = { path = "../../../frontend/sdks/invoke", features = [ + "event", +], default-features = false } [dev-dependencies] tardis = { workspace = true, features = ["test"] } diff --git a/backend/spi/spi-stats/src/event.rs b/backend/spi/spi-stats/src/event.rs new file mode 100644 index 000000000..325b75a58 --- /dev/null +++ b/backend/spi/spi-stats/src/event.rs @@ -0,0 +1,25 @@ +use crate::{get_tardis_inst, serv}; +use bios_sdk_invoke::clients::{ + event_client::{get_topic, mq_error, ContextHandler, SPI_RPC_TOPIC}, + spi_log_client::LogItemAddReq, +}; +use tardis::{ + basic::{dto::TardisContext, result::TardisResult}, + {log as tracing, log::instrument}, +}; + +#[instrument] +async fn handle_add_event(req: LogItemAddReq, ctx: TardisContext) -> TardisResult<()> { + let funs = get_tardis_inst(); + //TODO + Ok(()) +} + +pub async fn handle_events() -> TardisResult<()> { + use bios_sdk_invoke::clients::event_client::asteroid_mq::prelude::*; + if let Some(topic) = get_topic(&SPI_RPC_TOPIC) { + topic.create_endpoint([Interest::new("stats/*")]).await.map_err(mq_error)?.create_event_loop().with_handler(ContextHandler(handle_add_event)).spawn(); + } + + Ok(()) +} diff --git a/backend/spi/spi-stats/src/lib.rs b/backend/spi/spi-stats/src/lib.rs index c75a7be8b..104cb38f5 100644 --- a/backend/spi/spi-stats/src/lib.rs +++ b/backend/spi/spi-stats/src/lib.rs @@ -4,6 +4,7 @@ extern crate lazy_static; mod api; pub mod dto; +pub mod event; mod serv; pub mod stats_config; pub mod stats_constants; diff --git a/backend/spi/spi-stats/src/serv/pg/stats_pg_initializer.rs 
b/backend/spi/spi-stats/src/serv/pg/stats_pg_initializer.rs index e6b6c1897..179c8c897 100644 --- a/backend/spi/spi-stats/src/serv/pg/stats_pg_initializer.rs +++ b/backend/spi/spi-stats/src/serv/pg/stats_pg_initializer.rs @@ -24,6 +24,7 @@ pub async fn init_conf_dim_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRelD rel_attribute_url character varying, create_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, update_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP"#, + None, vec![], None, Some("update_time"), @@ -46,6 +47,7 @@ pub async fn init_conf_fact_table_and_conn(bs_inst: TypedSpiBsInst<'_, TardisRel remark character varying NOT NULL, create_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, update_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP"#, + None, vec![], None, Some("update_time"), @@ -80,6 +82,7 @@ pub async fn init_conf_fact_col_table_and_conn(bs_inst: TypedSpiBsInst<'_, Tardi create_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, update_time timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, unique (key, rel_conf_fact_key)"#, + None, vec![("rel_conf_fact_key", "btree")], None, Some("update_time"), diff --git a/backend/supports/iam/src/basic/serv/clients/iam_log_client.rs b/backend/supports/iam/src/basic/serv/clients/iam_log_client.rs index a156a2e55..a741393aa 100644 --- a/backend/supports/iam/src/basic/serv/clients/iam_log_client.rs +++ b/backend/supports/iam/src/basic/serv/clients/iam_log_client.rs @@ -138,24 +138,24 @@ impl IamLogClient { let tag: String = tag.into(); let own_paths = if ctx.own_paths.len() < 2 { None } else { Some(ctx.own_paths.clone()) }; let owner = if ctx.owner.len() < 2 { None } else { Some(ctx.owner.clone()) }; + let add_req = LogItemAddReq { tag, - content: TardisFuns::json.obj_to_string(&content).expect("req_msg not a valid json value"), + content: TardisFuns::json.obj_to_json(&content).expect("req_msg not a valid json value"), kind, ext: Some(search_ext), key, op, rel_key, - id: None, + idempotent_id: None, ts: ts.map(|ts| DateTime::parse_from_rfc3339(&ts).unwrap_or_default().with_timezone(&Utc)), owner, own_paths, + msg: None, + owner_name: None, + push: false, }; - if let Some(topic) = get_topic(&SPI_RPC_TOPIC) { - topic.send_event(add_req.inject_context(funs, ctx).json()).map_err(mq_error).await?; - } else { - SpiLogClient::add(&add_req, funs, ctx).await?; - } + SpiLogClient::add(add_req, funs, ctx).await?; Ok(()) } diff --git a/backend/supports/iam/src/basic/serv/iam_account_serv.rs b/backend/supports/iam/src/basic/serv/iam_account_serv.rs index 12f23cc4f..b153a412c 100644 --- a/backend/supports/iam/src/basic/serv/iam_account_serv.rs +++ b/backend/supports/iam/src/basic/serv/iam_account_serv.rs @@ -471,24 +471,31 @@ impl IamAccountServ { IamSetServ::get_set_id_by_code(&IamSetServ::get_default_code(&IamSetKind::Org, &IamTenantServ::get_id_by_ctx(ctx, funs)?), true, funs, ctx).await? // IamSetServ::get_default_set_id_by_ctx(&IamSetKind::Org, funs, ctx).await? 
}; - let roles = IamRoleServ::find_items(&IamRoleFilterReq { - basic: RbumBasicFilterReq { - ignore_scope: false, - rel_ctx_owner: false, - with_sub_own_paths: true, - enabled: Some(true), + let roles = IamRoleServ::find_items( + &IamRoleFilterReq { + basic: RbumBasicFilterReq { + ignore_scope: false, + rel_ctx_owner: false, + with_sub_own_paths: true, + enabled: Some(true), + ..Default::default() + }, + rel: Some(RbumItemRelFilterReq { + rel_by_from: false, + optional: false, + tag: Some(IamRelKind::IamAccountRole.to_string()), + from_rbum_kind: Some(RbumRelFromKind::Item), + rel_item_id: Some(account.id.clone()), + ..Default::default() + }), ..Default::default() }, - rel: Some(RbumItemRelFilterReq { - rel_by_from: false, - optional: false, - tag: Some(IamRelKind::IamAccountRole.to_string()), - from_rbum_kind: Some(RbumRelFromKind::Item), - rel_item_id: Some(account.id.clone()), - ..Default::default() - }), - ..Default::default() - }, None, None, funs, ctx).await?; + None, + None, + funs, + ctx, + ) + .await?; let enabled_apps = IamAppServ::find_items( &IamAppFilterReq { diff --git a/backend/supports/iam/src/console_interface/api/iam_ci_tenant_api.rs b/backend/supports/iam/src/console_interface/api/iam_ci_tenant_api.rs index f75d7224f..56c331c46 100644 --- a/backend/supports/iam/src/console_interface/api/iam_ci_tenant_api.rs +++ b/backend/supports/iam/src/console_interface/api/iam_ci_tenant_api.rs @@ -33,7 +33,6 @@ pub struct IamCiTenantApi; /// 接口控制台租户API #[poem_openapi::OpenApi(prefix_path = "/ci/tenant", tag = "bios_basic::ApiTag::Tenant")] impl IamCiTenantApi { - /// Find Tenants /// 查找租户 #[oai(path = "/all", method = "get")] diff --git a/frontend/sdks/invoke/Cargo.toml b/frontend/sdks/invoke/Cargo.toml index 8f97e8884..629a8980d 100644 --- a/frontend/sdks/invoke/Cargo.toml +++ b/frontend/sdks/invoke/Cargo.toml @@ -18,11 +18,11 @@ path = "src/lib.rs" default = ["spi_kv", "spi_log", "spi_search", "spi_stats", "iam", "event"] spi_base = [] spi_kv = ["spi_base"] -spi_log = ["spi_base"] +spi_log = ["spi_base", "iam"] spi_search = ["spi_base"] spi_stats = ["spi_base"] iam = [] -event = [] +event = ["asteroid-mq"] macro = ["dep:simple-invoke-client-macro"] [dependencies] @@ -37,7 +37,7 @@ tardis = { workspace = true, features = [ ] } simple-invoke-client-macro = { path = "../simple-invoke-client-macro", optional = true } crossbeam = "0.8" -asteroid-mq = { workspace = true, features = ["cluster-k8s", "json"] } +asteroid-mq = { workspace = true, features = ["cluster-k8s"], optional = true } [dev-dependencies] tardis = { workspace = true, features = ["test"] } simple-invoke-client-macro = { path = "../simple-invoke-client-macro" } @@ -45,4 +45,3 @@ simple-invoke-client-macro = { path = "../simple-invoke-client-macro" } [[test]] name = "test_macros" required-features = ["macro"] - diff --git a/frontend/sdks/invoke/src/clients/flow_client.rs b/frontend/sdks/invoke/src/clients/flow_client.rs index 3101b023e..39a2067a9 100644 --- a/frontend/sdks/invoke/src/clients/flow_client.rs +++ b/frontend/sdks/invoke/src/clients/flow_client.rs @@ -1,5 +1,6 @@ use serde::{Deserialize, Serialize}; +#[cfg(feature = "event")] pub mod event { use asteroid_mq::prelude::*; @@ -9,10 +10,10 @@ pub mod event { pub const EVENT_POST_CHANGE: &str = "flow/post_change"; impl EventAttribute for FlowFrontChangeReq { - const SUBJECT: Subject = Subject::const_new(b"flow/front_change"); + const SUBJECT: Subject = Subject::const_new("flow/front_change"); } impl EventAttribute for FlowPostChangeReq { - const SUBJECT: Subject = 
Subject::const_new(b"flow/post_change");
+        const SUBJECT: Subject = Subject::const_new("flow/post_change");
     }
 }
diff --git a/frontend/sdks/invoke/src/clients/iam_client.rs b/frontend/sdks/invoke/src/clients/iam_client.rs
index ac846f96d..bdc538531 100644
--- a/frontend/sdks/invoke/src/clients/iam_client.rs
+++ b/frontend/sdks/invoke/src/clients/iam_client.rs
@@ -42,6 +42,7 @@ impl<'a> SimpleInvokeClient for IamClient<'a> {
 
 #[derive(poem_openapi::Object, Serialize, Deserialize, Debug)]
 pub struct IamAccountDetailAggResp {
+    pub owner_name: Option<String>,
     pub roles: HashMap<String, String>,
     pub certs: HashMap<String, String>,
     pub orgs: Vec<String>,
diff --git a/frontend/sdks/invoke/src/clients/spi_kv_client.rs b/frontend/sdks/invoke/src/clients/spi_kv_client.rs
index 83e50acf9..4f63dbd2e 100644
--- a/frontend/sdks/invoke/src/clients/spi_kv_client.rs
+++ b/frontend/sdks/invoke/src/clients/spi_kv_client.rs
@@ -14,10 +14,10 @@ use super::base_spi_client::BaseSpiClient;
 pub mod event {
     use asteroid_mq::prelude::*;
     impl EventAttribute for super::KvItemAddOrModifyReq {
-        const SUBJECT: Subject = Subject::const_new(b"kv/add");
+        const SUBJECT: Subject = Subject::const_new("kv/add");
     }
     impl EventAttribute for super::KvItemDeleteReq {
-        const SUBJECT: Subject = Subject::const_new(b"kv/delete");
+        const SUBJECT: Subject = Subject::const_new("kv/delete");
     }
 }
 #[derive(Clone, Debug, Default)]
diff --git a/frontend/sdks/invoke/src/clients/spi_log_client.rs b/frontend/sdks/invoke/src/clients/spi_log_client.rs
index c7b09da2a..61cebc790 100644
--- a/frontend/sdks/invoke/src/clients/spi_log_client.rs
+++ b/frontend/sdks/invoke/src/clients/spi_log_client.rs
@@ -11,15 +11,22 @@ use tardis::{
     TardisFuns, TardisFunsInst,
 };
 
-use crate::{clients::base_spi_client::BaseSpiClient, invoke_constants::DYNAMIC_LOG, invoke_enumeration::InvokeModuleKind};
+use crate::{clients::base_spi_client::BaseSpiClient, invoke_config::InvokeConfig, invoke_constants::DYNAMIC_LOG, invoke_enumeration::InvokeModuleKind};
 
+#[cfg(feature = "event")]
+use super::event_client::{get_topic, mq_error, EventAttributeExt, SPI_RPC_TOPIC};
+use super::iam_client::IamClient;
+#[cfg(feature = "event")]
+use tardis::futures::TryFutureExt as _;
+
+#[cfg(feature = "event")]
 pub mod event {
     use asteroid_mq::prelude::*;
 
     pub const LOG_AVATAR: &str = "spi-log";
 
     impl EventAttribute for super::LogItemAddReq {
-        const SUBJECT: Subject = Subject::const_new(b"log/add");
+        const SUBJECT: Subject = Subject::const_new("log/add");
     }
 }
 #[derive(Debug, Default, Clone)]
@@ -64,16 +71,19 @@ pub struct LogDynamicContentReq {
 #[derive(Serialize, Deserialize, Debug, Default)]
 pub struct LogItemAddReq {
     pub tag: String,
-    pub content: String,
+    pub content: Value,
     pub kind: Option<String>,
     pub ext: Option<Value>,
     pub key: Option<String>,
     pub op: Option<String>,
     pub rel_key: Option<String>,
-    pub id: Option<String>,
+    pub idempotent_id: Option<String>,
     pub ts: Option<DateTime<Utc>>,
     pub owner: Option<String>,
+    pub owner_name: Option<String>,
     pub own_paths: Option<String>,
+    pub push: bool,
+    pub msg: Option<String>,
 }
 
 impl SpiLogClient {
@@ -88,62 +98,40 @@
         funs: &TardisFunsInst,
         ctx: &TardisContext,
     ) -> TardisResult<()> {
-        Self::add_with_many_params(
-            DYNAMIC_LOG,
-            &TardisFuns::json.obj_to_string(content)?,
-            ext,
+        let cfg = funs.conf::<InvokeConfig>();
+        let owner_name = IamClient::new("", funs, &ctx, cfg.module_urls.get("iam").expect("missing iam base url")).get_account(&ctx.owner, &ctx.own_paths).await?.owner_name;
+        let req = LogItemAddReq {
+            tag: DYNAMIC_LOG.to_string(),
+            content: TardisFuns::json.obj_to_json(content)?,
             kind,
+            ext,
             key,
             op,
             rel_key,
-            ts,
-            Some(ctx.owner.clone()),
-            Some(ctx.own_paths.clone()),
-            funs,
-            ctx,
-        )
-        .await?;
+            idempotent_id: None,
+            ts: ts.map(|ts| DateTime::parse_from_rfc3339(&ts).unwrap_or_default().with_timezone(&Utc)),
+            owner: Some(ctx.owner.clone()),
+            own_paths: Some(ctx.own_paths.clone()),
+            msg: None,
+            owner_name: owner_name,
+            push: false,
+        };
+        Self::add(req, funs, ctx).await?;
         Ok(())
     }
 
-    pub async fn add(req: &LogItemAddReq, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> {
+    pub async fn add(req: LogItemAddReq, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> {
+        #[cfg(feature = "event")]
+        if let Some(topic) = get_topic(&SPI_RPC_TOPIC) {
+            topic.send_event(req.inject_context(funs, ctx).json()).map_err(mq_error).await?;
+            return Ok(());
+        }
         let log_url: String = BaseSpiClient::module_url(InvokeModuleKind::Log, funs).await?;
         let headers = BaseSpiClient::headers(None, funs, ctx).await?;
         funs.web_client().post_obj_to_str(&format!("{log_url}/ci/item"), &req, headers.clone()).await?;
         Ok(())
     }
 
-    #[deprecated = "this function has too many parameters, use `SpiLogClient::add` instead"]
-    pub async fn add_with_many_params(
-        tag: &str,
-        content: &str,
-        ext: Option<Value>,
-        kind: Option<String>,
-        key: Option<String>,
-        op: Option<String>,
-        rel_key: Option<String>,
-        ts: Option<String>,
-        owner: Option<String>,
-        own_paths: Option<String>,
-        funs: &TardisFunsInst,
-        ctx: &TardisContext,
-    ) -> TardisResult<()> {
-        let req = LogItemAddReq {
-            tag: tag.to_string(),
-            content: content.to_string(),
-            kind,
-            ext,
-            key,
-            op,
-            rel_key,
-            id: None,
-            ts: ts.map(|ts| DateTime::parse_from_rfc3339(&ts).unwrap_or_default().with_timezone(&Utc)),
-            owner,
-            own_paths,
-        };
-        Self::add(&req, funs, ctx).await
-    }
-
     pub async fn find(find_req: LogItemFindReq, funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<Option<TardisPage<LogItemFindResp>>> {
         let log_url: String = BaseSpiClient::module_url(InvokeModuleKind::Log, funs).await?;
         let headers = BaseSpiClient::headers(None, funs, ctx).await?;
diff --git a/frontend/sdks/invoke/src/clients/spi_search_client.rs b/frontend/sdks/invoke/src/clients/spi_search_client.rs
index a28e4c5f1..b3fa10f9b 100644
--- a/frontend/sdks/invoke/src/clients/spi_search_client.rs
+++ b/frontend/sdks/invoke/src/clients/spi_search_client.rs
@@ -23,13 +23,13 @@ pub mod event {
     }
     impl EventAttribute for SearchItemAddReq {
-        const SUBJECT: Subject = Subject::const_new(b"search/add");
+        const SUBJECT: Subject = Subject::const_new("search/add");
     }
 
     impl EventAttribute for SearchEventItemModifyReq {
-        const SUBJECT: Subject = Subject::const_new(b"search/modify");
+        const SUBJECT: Subject = Subject::const_new("search/modify");
     }
 
     impl EventAttribute for SearchEventItemDeleteReq {
-        const SUBJECT: Subject = Subject::const_new(b"search/delete");
+        const SUBJECT: Subject = Subject::const_new("search/delete");
     }
 }
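
Note (illustration, not part of the patch): with `LogItemAddReq::content` now a `serde_json::Value` and the new `idempotent_id`/`owner_name`/`push`/`msg` fields, caller-side construction looks roughly like the sketch below. The `funs`/`ctx` values and the example field contents are assumptions.

// Minimal usage sketch of the reworked SpiLogClient::add (by-value request).
use bios_sdk_invoke::clients::spi_log_client::{LogItemAddReq, SpiLogClient};
use tardis::basic::{dto::TardisContext, result::TardisResult};
use tardis::{serde_json::json, TardisFunsInst};

async fn log_example(funs: &TardisFunsInst, ctx: &TardisContext) -> TardisResult<()> {
    let req = LogItemAddReq {
        tag: "feed".to_string(),
        // `content` is a JSON value now, not a pre-serialized String
        content: json!({ "title": "example", "kind": "req" }),
        kind: None,
        ext: None,
        key: Some("001".to_string()),
        op: Some("init".to_string()),
        rel_key: None,
        idempotent_id: None,
        ts: None,
        owner: Some(ctx.owner.clone()),
        owner_name: None,
        own_paths: Some(ctx.own_paths.clone()),
        push: false,
        msg: None,
    };
    // With the `event` feature enabled this is published to the asteroid-mq topic;
    // otherwise it falls back to the HTTP call against the spi-log `/ci/item` endpoint.
    SpiLogClient::add(req, funs, ctx).await
}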
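Note (illustration, not part of the patch): the asteroid-mq bump switches `Subject::const_new` from byte strings to `&str`. The sketch below mirrors the new `backend/spi/spi-stats/src/event.rs` wiring for a hypothetical payload type; `StatsFactRecordAddReq` and the `stats/add` subject are invented names used only for illustration.

// Declaring a subject and subscribing a context-aware handler on the SPI RPC topic.
use bios_sdk_invoke::clients::event_client::asteroid_mq::prelude::*;
use bios_sdk_invoke::clients::event_client::{get_topic, mq_error, ContextHandler, SPI_RPC_TOPIC};
use serde::{Deserialize, Serialize};
use tardis::basic::{dto::TardisContext, result::TardisResult};

#[derive(Serialize, Deserialize, Debug)]
pub struct StatsFactRecordAddReq {
    pub fact_key: String,
}

impl EventAttribute for StatsFactRecordAddReq {
    // rev 83a6643 takes &str directly; the b"..." byte-string form is gone.
    const SUBJECT: Subject = Subject::const_new("stats/add");
}

async fn handle_fact_record_add(_req: StatsFactRecordAddReq, _ctx: TardisContext) -> TardisResult<()> {
    // Real processing would call into the spi-stats serv layer.
    Ok(())
}

pub async fn subscribe() -> TardisResult<()> {
    if let Some(topic) = get_topic(&SPI_RPC_TOPIC) {
        topic.create_endpoint([Interest::new("stats/*")]).await.map_err(mq_error)?.create_event_loop().with_handler(ContextHandler(handle_fact_record_add)).spawn();
    }
    Ok(())
}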
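Note (assumption): the patch adds `pub mod event;` and `handle_events()` to spi-stats but does not show the call site; presumably it is invoked once from the crate's initializer after the mq node has joined the cluster, roughly:

// Hypothetical wrapper; only `event::handle_events()` comes from the patch above.
use crate::event;
use tardis::basic::result::TardisResult;

pub(crate) async fn init_event_subscriptions() -> TardisResult<()> {
    event::handle_events().await
}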