diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6da69ec..570b082 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -14,7 +14,7 @@ holo auto installer does 2 main things. - Install a happs that are supposed to be installed on the holoport but are not installed `install_holo_hosted_happs` - Uninstall happs that are not supposed to be installed on the holoport but are installed - `uninstall_ineligible_happs` + `handle_ineligible_happs` Generally if you want to restrict something so it is not installed on the holoport you can use the function inside `uninstall_apps.rs` called `should_be_installed` If this returns a `false` the happ will be uninstalled form the holoport. diff --git a/Cargo.lock b/Cargo.lock index 1b86dc5..c4dd904 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2241,7 +2241,7 @@ dependencies = [ [[package]] name = "hpos_hc_connect" version = "0.1.0" -source = "git+https://github.com/holo-host/hpos-service-crates.git?rev=63044a665eafc9f8f6ff130801ad45303e1288f4#63044a665eafc9f8f6ff130801ad45303e1288f4" +source = "git+https://github.com/holo-host/hpos-service-crates.git?rev=1a333b0688dbc7a2dbbe64eb25a2d98f8200b14d#1a333b0688dbc7a2dbbe64eb25a2d98f8200b14d" dependencies = [ "again", "anyhow", diff --git a/Cargo.toml b/Cargo.toml index e95a4da..be3dd87 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,4 +34,4 @@ mr_bundle = { version = "0.4.0-dev.4" } hpos-config-core = { git = "https://github.com/Holo-Host/hpos-config", rev = "a36f862869cc162c843ac27ed617910d68f480cc" } hpos-config-seed-bundle-explorer ={ git = "https://github.com/Holo-Host/hpos-config", rev = "a36f862869cc162c843ac27ed617910d68f480cc" } chrono = "0.4.33" -hpos_hc_connect = { git = "https://github.com/holo-host/hpos-service-crates.git", rev = "63044a665eafc9f8f6ff130801ad45303e1288f4" } +hpos_hc_connect = { git = "https://github.com/holo-host/hpos-service-crates.git", rev = "1a333b0688dbc7a2dbbe64eb25a2d98f8200b14d" } diff --git a/src/entries.rs b/src/entries.rs deleted file mode 100644 index f63bca7..0000000 --- a/src/entries.rs +++ /dev/null @@ -1,80 +0,0 @@ -use anyhow::{Context, Result}; -use holochain_types::prelude::ActionHashB64; -use holochain_types::prelude::AgentPubKeyB64; -use holochain_types::prelude::MembraneProof; -use holofuel_types::fuel::Fuel; -use serde::Deserialize; -use serde::Serialize; -use std::fs::File; -use std::time::Duration; -use std::{collections::HashMap, env}; -use tracing::trace; - -#[derive(Deserialize, Debug, Clone)] -pub struct PublisherPricingPref { - pub cpu: Fuel, - pub storage: Fuel, - pub bandwidth: Fuel, -} - -#[derive(Deserialize, Debug, Clone)] -pub struct DnaResource { - pub hash: String, // hash of the dna, not a stored dht address - pub src_url: String, - pub nick: String, -} - -#[derive(Debug, Clone, Deserialize)] -pub struct HostSettings { - pub is_enabled: bool, - pub is_host_disabled: bool, - pub is_auto_disabled: bool, -} - -#[derive(Deserialize, Debug, Clone)] -pub struct PresentedHappBundle { - pub id: ActionHashB64, - pub provider_pubkey: AgentPubKeyB64, - pub is_draft: bool, - pub is_paused: bool, - pub uid: Option, - pub bundle_url: String, - pub name: String, - pub categories: Vec, - pub jurisdictions: Vec, - pub exclude_jurisdictions: bool, - pub special_installed_app_id: Option, - pub host_settings: HostSettings, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct HappPreferences { - pub max_fuel_before_invoice: Fuel, - pub max_time_before_invoice: Duration, - pub price_compute: Fuel, - pub price_storage: Fuel, - pub price_bandwidth: Fuel, 
-} -impl HappPreferences { - /// Save preferences to a file under {SL_PREFS_PATH} - /// which allows hpos-api to read current values - pub fn save(self) -> Result { - if let Ok(path) = env::var("SL_PREFS_PATH") { - trace!("Writing default servicelogger prefs to {}", &path); - // create or overwrite to a file - let file = File::create(&path)?; - serde_yaml::to_writer(file, &self).context(format!( - "Failed writing service logger preferences to file {}", - path - ))?; - }; - Ok(self) - } -} - -#[derive(Serialize, Debug, Clone)] -pub struct InstallHappBody { - pub happ_id: String, - pub preferences: HappPreferences, - pub membrane_proofs: HashMap, -} diff --git a/src/lib.rs b/src/lib.rs index 6e38450..33c6df9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,81 +1,108 @@ // TODO: https://github.com/tokio-rs/tracing/issues/843 #![allow(clippy::unit_arg)] -pub mod entries; -use std::collections::HashMap; +pub mod types; +mod utils; + +pub use crate::types::happ::HappPreferences; +pub use hpos_hc_connect::AdminWebsocket; use anyhow::Result; use holochain_types::dna::{hash_type::Agent, HoloHash}; -use hpos_hc_connect::hha_agent::HHAAgent; -pub use hpos_hc_connect::AdminWebsocket; -pub mod transaction_types; -mod utils; +use hpos_hc_connect::{hha_agent::HHAAgent, holo_config::Config}; +use std::collections::HashMap; use tracing::{debug, error, info}; +use types::hbs::{HbsClient, KycLevel}; +use types::PublishedHappDetails; use utils::{ - get_all_published_hosted_happs, get_happ_preferences, get_hosting_preferences, - get_pending_transactions, get_publisher_jurisdiction, install_holo_hosted_happs, - suspend_unpaid_happs, uninstall_ineligible_happs, + get_all_published_hosted_happs, handle_ineligible_happs, install_holo_hosted_happs, + suspend_unpaid_happs, }; -mod hbs; -use hbs::{HbsClient, KycLevel}; -use hpos_hc_connect::holo_config::Config; -/// gets all the enabled happs from HHA -/// installs and enables new happs that were registered by a provider and holochain disables those paused by provider in hha -/// then uninstalls happs that are ineligible for host (eg: holo-disabled, unallowed pricing for kyc level) +/// 1. Gets all the holo-enabled happs from HHA +/// 2. Suspends happs with overdue payments +/// 3. Installs and enables (enables in holochain and holo) all new happs that were registered by a provider and holochain-disables those paused by provider in hha +/// 4. Uninstalls happs that are ineligible for host (eg: holo-disabled, unallowed pricing for kyc level, incongruent price settings with publisher/happ) pub async fn run(config: &Config) -> Result<()> { info!("Activating holo hosted apps"); let hbs_connect = HbsClient::connect()?; - let hosting_criteria = match hbs_connect.get_hosting_criteria().await { + let host_credentials = match hbs_connect.get_host_hosting_criteria().await { Some(v) => v, None => { error!("Unable to get hosting criteria from HBS. 
Exiting..."); return Err(anyhow::anyhow!("Unable to get hosting criteria")); } }; - let kyc_level = hosting_criteria.kyc; - debug!("Got kyc level {:?}", &kyc_level); - let jurisdiction = hosting_criteria.jurisdiction; - debug!("Got jurisdiction from hbs {:?}", jurisdiction); - - let is_kyc_level_2 = kyc_level == KycLevel::Level2; + debug!("Got host credentials from hbs {:?}", host_credentials); let mut core_app = HHAAgent::spawn(Some(config)).await?; - // suspend happs that have overdue payments - let pending_transactions = get_pending_transactions(&mut core_app).await?; + // Suspend happs that have overdue payments + let pending_transactions = core_app.get_pending_transactions().await?; let suspended_happs = suspend_unpaid_happs(&mut core_app, pending_transactions).await?; - let hosting_preference = get_hosting_preferences(&mut core_app).await?; - let list_of_happs = get_all_published_hosted_happs(&mut core_app).await?; + let published_happs = get_all_published_hosted_happs(&mut core_app).await?; + + // Get happ jurisdictions AND publisher jurisdiction for each happ + let mut published_happ_details: HashMap = HashMap::new(); let mut publisher_jurisdictions: HashMap, Option> = HashMap::new(); - let mut happ_jurisdictions: HashMap> = HashMap::new(); - // get publisher jurisdiction for each happ - for happ in list_of_happs.iter() { - let happ_prefs = get_happ_preferences(&mut core_app, happ.happ_id.clone()).await?; + + for happ in published_happs.iter() { + let happ_prefs = core_app.get_happ_preferences(happ.happ_id.clone()).await?; let publisher_pubkey = happ_prefs.provider_pubkey; + + // If already have publisher pubkey stored in `publisher_jurisdictions` map, then grab the jurisdiction value and set value in `published_happ_details` map + // otherwise, make a call to hha to fetch the publisher jurisdiction and set in both the `published_happ_details` map and `publisher_jurisdictions` map match publisher_jurisdictions.get(&publisher_pubkey) { Some(jurisdiction) => { - happ_jurisdictions - .insert(happ.happ_id.clone().to_string(), (*jurisdiction).clone()); + published_happ_details.insert( + happ.happ_id.clone().to_string(), + PublishedHappDetails { + publisher_jurisdiction: (*jurisdiction).clone(), + happ_jurisdictions: happ.jurisdictions.clone(), + should_exclude_happ_jurisdictions: happ.exclude_jurisdictions, + happ_categories: happ.categories.clone(), + is_disabled_by_host: happ.is_host_disabled, + }, + ); } None => { - let jurisdiction = - get_publisher_jurisdiction(&mut core_app, publisher_pubkey.clone()).await?; + let jurisdiction = core_app + .get_publisher_jurisdiction(publisher_pubkey.clone()) + .await?; publisher_jurisdictions.insert(publisher_pubkey, jurisdiction.clone()); - happ_jurisdictions.insert(happ.happ_id.clone().to_string(), jurisdiction); + published_happ_details.insert( + happ.happ_id.clone().to_string(), + PublishedHappDetails { + publisher_jurisdiction: jurisdiction, + happ_jurisdictions: happ.jurisdictions.clone(), + should_exclude_happ_jurisdictions: happ.exclude_jurisdictions, + happ_categories: happ.categories.clone(), + is_disabled_by_host: happ.is_host_disabled, + }, + ); } } } - install_holo_hosted_happs(config, &list_of_happs, is_kyc_level_2).await?; - uninstall_ineligible_happs( - config, - &list_of_happs, - is_kyc_level_2, + let host_happ_preferences = core_app.get_host_preferences().await?.into(); + + let is_host_kyc_level_2 = host_credentials.clone().kyc == KycLevel::Level2; + + install_holo_hosted_happs( + &mut core_app, + config.admin_port, + 
&published_happs, + is_host_kyc_level_2, + ) + .await?; + + handle_ineligible_happs( + &mut core_app, + config.admin_port, suspended_happs, - jurisdiction, - hosting_preference, - happ_jurisdictions, + host_credentials, + host_happ_preferences, + published_happ_details, ) .await?; Ok(()) diff --git a/src/transaction_types.rs b/src/transaction_types.rs deleted file mode 100644 index 73fe590..0000000 --- a/src/transaction_types.rs +++ /dev/null @@ -1,122 +0,0 @@ -use std::time::Duration; - -use holochain_types::dna::ActionHashB64; -use holochain_types::dna::AgentPubKey; -use holochain_types::dna::AgentPubKeyB64; -use holochain_types::dna::EntryHashB64; -use holochain_types::prelude::CapSecret; -use holochain_types::prelude::Timestamp; -use holofuel_types::fuel::Fuel; - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub enum AcceptedBy { - ByMe, - ByCounterParty, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub enum TransactionStatus { - Actionable, // tx that is create by 1st instance and waiting for counterparty to complete the tx - Pending, // tx that was created by 1st instance and second instance - Accepted(AcceptedBy), // tx that was accepted by counterparty but has yet to complete countersigning. - Completed, - Declined, - Expired, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub enum TransactionDirection { - Outgoing, // To(Address), - Incoming, // From(Address), -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -#[serde(rename_all = "snake_case")] -pub enum POS { - Hosting(CapSecret), - Redemption(String), // Contains wallet address -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub enum TransactionType { - Request, //Invoice - Offer, //Promise -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct Transaction { - pub id: EntryHashB64, - pub amount: String, - pub fee: String, - pub created_date: Timestamp, - pub completed_date: Option, - pub transaction_type: TransactionType, - pub counterparty: AgentPubKeyB64, - pub direction: TransactionDirection, - pub status: TransactionStatus, - pub note: Option, - pub proof_of_service: Option, - pub url: Option, - pub expiration_date: Option, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct PendingTransaction { - pub invoice_pending: Vec, - pub promise_pending: Vec, - pub invoice_declined: Vec, - pub promise_declined: Vec, - pub accepted: Vec, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct InvoicedItems { - pub quantity: String, // we're using serde_yaml to convert the struct into a string - pub prices: String, // we're using serde_yaml to convert the struct into a string -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct InvoiceNote { - pub hha_id: ActionHashB64, - pub invoice_period_start: Timestamp, - pub invoice_period_end: Timestamp, - // This can be commented back in when the chc can support larger entries [#78](https://github.com/Holo-Host/servicelogger-rsm/pull/78) - // activity_logs_range: Vec, - // disk_usage_logs_range: Vec, - #[serde(flatten)] - pub invoiced_items: InvoicedItems, -} - -#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)] -pub struct JurisdictionAndCategoryPreferences { - pub value: Vec, - pub is_exclusion: bool, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] -pub struct HostingPreferences { - pub max_fuel_before_invoice: Fuel, - pub price_compute: Fuel, - pub price_storage: Fuel, - pub price_bandwidth: Fuel, - pub 
max_time_before_invoice: Duration, - pub invoice_due_in_days: u8, - pub jurisdiction_prefs: Option, - pub categories_prefs: Option, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct ServiceloggerHappPreferences { - pub provider_pubkey: AgentPubKey, - pub max_fuel_before_invoice: Fuel, - pub price_compute: Fuel, - pub price_storage: Fuel, - pub price_bandwidth: Fuel, - pub max_time_before_invoice: Duration, - pub invoice_due_in_days: u8, // how many days after an invoice is created it it due -} - -pub struct PublisherJurisdiction { - pub happ_id: ActionHashB64, - pub jurisdiction: Option, -} diff --git a/src/types/happ.rs b/src/types/happ.rs new file mode 100644 index 0000000..602597d --- /dev/null +++ b/src/types/happ.rs @@ -0,0 +1,136 @@ +use holochain_types::prelude::MembraneProof; +use holofuel_types::fuel::Fuel; +use serde::Deserialize; +use serde::Serialize; +use std::collections::HashMap; +use std::collections::HashSet; +use std::time::Duration; +use tracing::warn; + +#[derive(Serialize, Debug, Clone)] +pub struct InstallHappBody { + pub happ_id: String, + pub preferences: HappPreferences, + pub membrane_proofs: HashMap<String, MembraneProof>, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct HappPreferences { + pub max_fuel_before_invoice: Fuel, + pub max_time_before_invoice: Duration, + pub price_compute: Fuel, + pub price_storage: Fuel, + pub price_bandwidth: Fuel, + pub invoice_due_in_days: u8, + pub jurisdiction_prefs: Option<ExclusivePreferences>, + pub categories_prefs: Option<ExclusivePreferences>, +} +impl HappPreferences { + pub fn is_happ_publisher_in_valid_jurisdiction( + &self, // host preferences + maybe_publisher_jurisdiction: &Option<String>, + ) -> bool { + let (jurisdictions_list, is_exclusive_list) = match self.jurisdiction_prefs.to_owned() { + Some(c) => { + let jurisdictions_list: HashSet<String> = c.value.iter().cloned().collect(); + (jurisdictions_list, c.is_exclusion) + } + None => { + warn!("Host's jurisdiction preferences not available"); + return false; + } + }; + + let publisher_jurisdiction = match maybe_publisher_jurisdiction { + Some(pj) => pj, + None => { + warn!("Could not get publisher jurisdiction for happ."); + return false; + } + }; + + let host_preferences_contain_happ_jurisdiction = + jurisdictions_list.contains(publisher_jurisdiction); + + if host_preferences_contain_happ_jurisdiction && is_exclusive_list { + // if the publisher's jurisdiction is in an exclusion list, then the happ is invalid + return false; + } + if !host_preferences_contain_happ_jurisdiction && !is_exclusive_list { + // if the publisher's jurisdiction is not in an inclusion list, then the happ is invalid + return false; + } + + true + } + + pub fn is_happ_valid_category( + &self, // host preferences + happ_categories: &[String], + ) -> bool { + let (categories_list, is_exclusive_list) = match self.categories_prefs.to_owned() { + Some(c) => { + let categories_list: HashSet<String> = c.value.iter().cloned().collect(); + (categories_list, c.is_exclusion) + } + None => { + warn!("Host's category preferences not available"); + return false; + } + }; + + let host_preferences_contain_happ_category = happ_categories + .iter() + .any(|category| categories_list.contains(category)); + + if host_preferences_contain_happ_category && is_exclusive_list { + // if the happ has a category that is in an exclusion list, then the happ is invalid + return false; + } + if !host_preferences_contain_happ_category && !is_exclusive_list { + // if the happ doesn't have a category that is in an inclusion list, then the happ is invalid + return false; + } +
true + } +} + +impl From for HappPreferences { + fn from(value: hpos_hc_connect::hha_types::HappPreferences) -> Self { + HappPreferences { + max_fuel_before_invoice: value.max_fuel_before_invoice, + max_time_before_invoice: value.max_time_before_invoice, + price_compute: value.price_compute, + price_storage: value.price_storage, + price_bandwidth: value.price_bandwidth, + invoice_due_in_days: value.invoice_due_in_days, + jurisdiction_prefs: if value.jurisdiction_prefs.is_some() { + Some(value.jurisdiction_prefs.unwrap().into()) + } else { + None + }, + categories_prefs: if value.categories_prefs.is_some() { + Some(value.categories_prefs.unwrap().into()) + } else { + None + }, + } + } +} + +// NB: This struct is currently only used for categories and jurisdictions +#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)] +pub struct ExclusivePreferences { + pub value: Vec, + pub is_exclusion: bool, +} + +impl From for ExclusivePreferences { + fn from(value: hpos_hc_connect::hha_types::ExclusivePreferences) -> Self { + ExclusivePreferences { + value: value.value, + is_exclusion: value.is_exclusion, + } + } +} diff --git a/src/hbs.rs b/src/types/hbs.rs similarity index 70% rename from src/hbs.rs rename to src/types/hbs.rs index cd4f76e..acb1327 100644 --- a/src/hbs.rs +++ b/src/types/hbs.rs @@ -6,21 +6,52 @@ use hpos_hc_connect::hha_agent::HHAAgent; use hpos_hc_connect::hpos_agent::get_hpos_config; use reqwest::Response; use serde::{Deserialize, Serialize}; +use tracing::warn; -#[derive(Debug, Deserialize)] -pub struct HostingCriteria { +#[derive(Debug, Deserialize, Clone, Default)] +pub struct HostCredentials { #[allow(dead_code)] pub id: Option, pub jurisdiction: Option, + #[serde(default)] pub kyc: KycLevel, + // The following is also returned by this hbs endpoint: + // pub publicKey: Option, + // pub email: String, } -#[derive(Debug, Serialize, Deserialize, PartialEq)] +impl HostCredentials { + pub fn is_host_in_valid_jurisdiction( + &self, + should_exclude_happ_jurisdictions: bool, + happ_jurisdictions: &[String], + ) -> bool { + let host_jurisdiction = match self.jurisdiction.to_owned() { + Some(j) => j, + None => { + warn!("Host's jurisdiction not available"); + return false; + } + }; + if should_exclude_happ_jurisdictions { + // If the host jurisdiction is present in the list that the hApp Manager has used, + // then the host jurisdiction is invalid + !happ_jurisdictions.contains(&host_jurisdiction) + } else { + // Otherwise, the host jurisdiction is valid if it exists in the happ's list of jurisdictionss + happ_jurisdictions.contains(&host_jurisdiction) + } + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Default)] pub enum KycLevel { #[serde(rename = "holo_kyc_1")] + #[default] Level1, #[serde(rename = "holo_kyc_2")] Level2, } + pub struct HbsClient { pub client: reqwest::Client, } @@ -29,26 +60,23 @@ impl HbsClient { let client = reqwest::Client::builder().build()?; Ok(Self { client }) } - pub async fn get_hosting_criteria(&self) -> Option { + pub async fn get_host_hosting_criteria(&self) -> Option { match self.get_access_token().await { Ok(v) => v, Err(e) => { tracing::warn!("Unable to get kyc & jurisdiction: {:?}", e); tracing::warn!("returning default kyc level 1"); tracing::warn!("returning default jurisdiction of None"); - Some(HostingCriteria { - id: None, - jurisdiction: None, - kyc: KycLevel::Level1, - }) + Some(HostCredentials::default()) } } } - async fn get_access_token(&self) -> Result> { + async fn get_access_token(&self) -> Result> { let 
response = self.inner_get_access_token().await?; tracing::debug!("response received"); let mut body = response.text().await?; + // 504 Gateway Timeout // here we either need to retry once more or end the script if body.contains("error code: 504") { @@ -60,10 +88,11 @@ impl HbsClient { return Ok(None); } } + tracing::debug!("Result: {}", body); let result: serde_json::Value = serde_json::from_str(&body)?; - let h: HostingCriteria = serde_json::from_value(result)?; - tracing::debug!("HostingCriteria: {:?}", h); + let h: HostCredentials = serde_json::from_value(result)?; + tracing::debug!("HostCredentials: {:?}", h); Ok(Some(h)) } @@ -77,7 +106,9 @@ impl HbsClient { let mut core_app = HHAAgent::spawn(None).await?; let pub_key = core_app.pubkey().await?; + tracing::debug!("email: {:?}, pub_key: {:?}", email, pub_key); + #[derive(Serialize, Deserialize, Debug, PartialEq, Clone, SerializedBytes)] #[allow(non_snake_case)] struct Body { @@ -100,7 +131,8 @@ impl HbsClient { .into(), ) .await?; - tracing::debug!("Signature: {:?}", signature); + + tracing::trace!("Signature: {:?}", signature); let connection = Self::connect()?; let mut headers = reqwest::header::HeaderMap::new(); @@ -121,6 +153,6 @@ impl HbsClient { } } -pub fn hbs_url() -> Result { +fn hbs_url() -> Result { std::env::var("HBS_URL").context("Failed to read HBS_URL. Is it set in env?") } diff --git a/src/types/mod.rs b/src/types/mod.rs new file mode 100644 index 0000000..48967cc --- /dev/null +++ b/src/types/mod.rs @@ -0,0 +1,26 @@ +pub mod happ; +pub mod hbs; +pub mod transaction; +use holochain_types::dna::ActionHashB64; +use holochain_types::prelude::{holochain_serial, SerializedBytes}; +use serde::{Deserialize, Serialize}; + +pub struct HappBundle { + pub happ_id: ActionHashB64, + pub bundle_url: String, + pub is_paused: bool, + pub is_host_disabled: bool, + pub special_installed_app_id: Option, + pub jurisdictions: Vec, + pub exclude_jurisdictions: bool, + pub categories: Vec, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, SerializedBytes)] +pub struct PublishedHappDetails { + pub publisher_jurisdiction: Option, + pub happ_jurisdictions: Vec, + pub should_exclude_happ_jurisdictions: bool, + pub happ_categories: Vec, + pub is_disabled_by_host: bool, +} diff --git a/src/types/transaction.rs b/src/types/transaction.rs new file mode 100644 index 0000000..efbb8a5 --- /dev/null +++ b/src/types/transaction.rs @@ -0,0 +1,17 @@ +use holochain_types::dna::ActionHashB64; +use holochain_types::prelude::Timestamp; + +#[derive(serde::Serialize, serde::Deserialize, Debug)] +pub struct InvoicedItems { + pub quantity: String, // we're using serde_yaml to convert the struct into a string + pub prices: String, // we're using serde_yaml to convert the struct into a string +} + +#[derive(serde::Serialize, serde::Deserialize, Debug)] +pub struct InvoiceNote { + pub hha_id: ActionHashB64, + pub invoice_period_start: Timestamp, + pub invoice_period_end: Timestamp, + #[serde(flatten)] + pub invoiced_items: InvoicedItems, +} diff --git a/src/utils.rs b/src/utils.rs index dc77624..15927f8 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,78 +1,264 @@ -pub use crate::entries; -use crate::transaction_types::{ - HostingPreferences, InvoiceNote, PendingTransaction, ServiceloggerHappPreferences, POS, +use crate::types::PublishedHappDetails; +pub use crate::types::{ + happ::{HappPreferences, InstallHappBody}, + hbs::{HostCredentials, KycLevel}, + transaction::InvoiceNote, + HappBundle, }; -use anyhow::{anyhow, Context, Result}; +use 
anyhow::{Context, Result}; use chrono::Utc; -use holochain_types::dna::{ActionHashB64, AgentPubKey}; -use holochain_types::prelude::{ - AppManifest, ExternIO, FunctionName, MembraneProof, SerializedBytes, UnsafeBytes, ZomeName, -}; +use holochain_conductor_api::AppStatusFilter; +use holochain_types::dna::ActionHashB64; +use holochain_types::prelude::{AppManifest, MembraneProof, SerializedBytes, UnsafeBytes}; use holofuel_types::fuel::Fuel; -use hpos_hc_connect::app_connection::CoreAppRoleName; -use hpos_hc_connect::hha_agent::HHAAgent; -use hpos_hc_connect::hha_types::HappAndHost; -use hpos_hc_connect::holo_config::Config; -use hpos_hc_connect::AdminWebsocket; -use isahc::config::RedirectPolicy; -use isahc::{prelude::*, HttpClient}; +use hpos_hc_connect::{ + hha_agent::HHAAgent, + holofuel_types::{PendingTransaction, POS}, + utils::download_file, + AdminWebsocket, +}; use itertools::Itertools; use mr_bundle::Bundle; -use std::collections::HashSet; use std::{ - collections::HashMap, env, fs, path::PathBuf, process::Command, str::FromStr, sync::Arc, + collections::{HashMap, HashSet}, + env, + process::Command, + str::FromStr, + sync::Arc, time::Duration, }; -use tempfile::TempDir; -use tracing::{debug, error, info, instrument, trace, warn}; +use tracing::{debug, error, info, trace, warn}; use url::Url; -pub struct HappBundle { - pub happ_id: ActionHashB64, - pub bundle_url: String, - pub is_paused: bool, - pub is_host_disabled: bool, - pub special_installed_app_id: Option, - pub jurisdictions: Vec, - pub exclude_jurisdictions: bool, - pub categories: Vec, +/// @TODO: Temporary read-only mem-proofs solution +/// This fn should be replaced by calling the joining-code service and getting the appropriate proof for the agent +pub async fn load_mem_proof_file(bundle_url: &str) -> Result> { + let url = Url::parse(bundle_url)?; + + let path = download_file(&url).await?; + + let bundle = Bundle::read_from_file(&path).await?; + + let AppManifest::V1(manifest) = bundle.manifest(); + + Ok(manifest + .roles + .clone() + .iter() + .map(|role| { + ( + role.name.clone(), + Arc::new(SerializedBytes::from(UnsafeBytes::from(vec![0]))), + ) // The read only memproof is [0] (or in base64 `AA==`) + }) + .collect()) } -/// installs a happs that are mented to be hosted +pub async fn get_all_published_hosted_happs( + core_app_client: &mut HHAAgent, +) -> Result> { + trace!("get_all_published_hosted_happs"); + + let happ_bundles = core_app_client.get_happs().await?; + + let happ_bundle_ids = happ_bundles + .into_iter() + .map(|happ| { + trace!( + "{} with happ-id: {:?} and bundle: {}, is-paused={}", + happ.name, + happ.id, + happ.bundle_url, + happ.is_paused + ); + HappBundle { + happ_id: happ.id, + bundle_url: happ.bundle_url, + is_paused: happ.is_paused, + is_host_disabled: happ.host_settings.is_host_disabled, + special_installed_app_id: happ.special_installed_app_id, + jurisdictions: happ.jurisdictions, + exclude_jurisdictions: happ.exclude_jurisdictions, + categories: happ.categories, + } + }) + .collect(); + + trace!("got happ bundles"); + Ok(happ_bundle_ids) +} + +async fn get_holoport_id() -> Result { + let password = + env::var("DEVICE_SEED_DEFAULT_PASSWORD").expect("DEVICE_SEED_DEFAULT_PASSWORD is not set"); + let hpos_config_path = env::var("HPOS_CONFIG_PATH") + .expect("HPOS_CONFIG_PATH not found. 
please add the path to the environment variable"); + + let holoport_id_output = Command::new("hpos-config-into-base36-id") + .arg("--config-path") + .arg(hpos_config_path) + .arg("--password") + .arg(password) + .output() + .expect("Failed to execute command"); + let holoport_id = String::from_utf8_lossy(&holoport_id_output.stdout); + Ok(holoport_id.to_string()) +} + +// There are core infrastructure happs that should never be uninstalled. All uninstallable happs start with "uhCkk" and don't contain ::servicelogger +fn is_hosted_happ(installed_app_id: &str) -> bool { + installed_app_id.starts_with("uhCkk") && !installed_app_id.contains("::servicelogger") +} + +fn is_anonymous_instance(installed_app_id: &str) -> bool { + installed_app_id.starts_with("uhCkk") && installed_app_id.len() == 53 +} + +/// Returns true if `installed_app_id` represents an anonymous or identified instance of `happ_id` +fn is_instance_of_happ(happ_id: &str, installed_app_id: &str) -> bool { + // An `installed_app_id` is one of + // - A core hApp (e.g. `servicelogger:0_2_1::251e7cc8-9c48-4841-9eb0-435f0bf97373`) + // - An anonymous instance with installed_app_id == happ_id + // - An identified instance matching /happ_id::agent_id/ + // - A happ-specific servicelogger instance matching /happ_id::servicelogger/ + happ_id == installed_app_id // anonymous + || installed_app_id.starts_with(happ_id) && !installed_app_id.ends_with("servicelogger") +} + +pub async fn suspend_unpaid_happs( + core_app_client: &mut HHAAgent, + pending_transactions: PendingTransaction, +) -> Result> { + let mut suspended_happs: Vec = Vec::new(); + + let holoport_id = get_holoport_id().await?; + + for invoice in &pending_transactions.invoice_pending { + if let Some(POS::Hosting(_)) = &invoice.proof_of_service { + if let Some(expiration_date) = invoice.expiration_date { + if expiration_date.as_millis() < Utc::now().timestamp_millis() { + if let Some(note) = invoice.note.clone() { + let invoice_note: Result = serde_yaml::from_str(¬e); + match invoice_note { + Ok(note) => { + let hha_id = note.hha_id; + suspended_happs.push(hha_id.clone().to_string()); + core_app_client + .holo_disable_happ(&hha_id, &holoport_id) + .await?; + } + Err(e) => { + error!("Error parsing invoice note: {:?}", e); + } + } + } + } + } + } + } + + debug!("suspend happs completed: {:?}", suspended_happs); + Ok(suspended_happs) +} + +pub async fn should_be_enabled( + installed_happ_id: &String, + happ_id: String, + suspended_happs: Vec, + host_credentials: HostCredentials, // the kyc and jurisdiction of a host + host_happ_preferences: HappPreferences, // the hosting preferences a host sets + published_happ_details: HashMap, // the jurisdiction, categories, and publisher jurisdiction for each happ +) -> bool { + trace!( + "Running the `should_be_enabled check` for {}", + installed_happ_id + ); + + if suspended_happs.contains(&happ_id) { + trace!("Disabling suspended happ {}", happ_id); + return false; + } + + // Iterate over each happ details to run credentials check between the happ, publisher, and host: + if let Some(happ_registration_details) = published_happ_details.get(&happ_id) { + // Verify that the publisher's jurisdiction matches the host's jurisdiction preferences + if !host_happ_preferences.is_happ_publisher_in_valid_jurisdiction( + &happ_registration_details.publisher_jurisdiction, + ) { + warn!("Happ {} will be disabled/uninstalled", installed_happ_id); + // Return false; app should not remain installed/enabled if publisher juridiction is invalid + return false; + } 
+ + // Verify that the host's jurisdiction matches the app's jurisdiction list - (ie: ensure that the hApp is allowed to run on the host's current jurisdiction) + // NB: The host's jurisdiction is taken from mongodb (via hbs) + if !host_credentials.is_host_in_valid_jurisdiction( + happ_registration_details.should_exclude_happ_jurisdictions, + &happ_registration_details.happ_jurisdictions, + ) { + warn!( + "Happ {} will be disabled/uninstalled", + installed_happ_id + ); + // Return false; app should not remain installed/enabled if host jurisdiction is invalid + return false; + } + + // Verify that the hApp category is a valid host category. + if !host_happ_preferences.is_happ_valid_category(&happ_registration_details.happ_categories) + { + warn!( + "Happ {} will be disabled/uninstalled", + installed_happ_id + ); + // Return false; app should not remain installed/enabled if happ category is invalid + return false; + }; + + // Check whether the expected happ is disabled by the host. + if happ_registration_details.is_disabled_by_host { + trace!( + "Disabling happ in Holochain Conductor {} because the host disabled it in hha", + installed_happ_id + ); + return false; + } + } + + // NB: Happ-hosting is only valid (despite price prefs) if the host is >= kyc level 2 + host_credentials.kyc == KycLevel::Level2 +} + +/// Installs all happs that are eligible for hosting pub async fn install_holo_hosted_happs( - config: &Config, + core_app_client: &mut HHAAgent, + admin_port: u16, happs: &[HappBundle], is_kyc_level_2: bool, ) -> Result<()> { info!("Starting to install...."); - // Hardcoded servicelogger preferences for all the hosted happs installed - let preferences = entries::HappPreferences { - max_fuel_before_invoice: Fuel::from_str("1000")?, // MAX_TX_AMT in holofuel is currently hard-coded to 50,000 - max_time_before_invoice: Duration::default(), - price_compute: Fuel::from_str("0.025")?, - price_storage: Fuel::from_str("0.025")?, - price_bandwidth: Fuel::from_str("0.025")?, - } - .save()?; - if happs.is_empty() { info!("No happs registered to be enabled for hosting."); return Ok(()); } - let mut admin_websocket = AdminWebsocket::connect(config.admin_port) + let mut admin_websocket = AdminWebsocket::connect(admin_port) .await .context("failed to connect to holochain's admin interface")?; - let running_happs = Arc::new( + let enabled_happs = Arc::new( admin_websocket - .list_running_app() + .list_apps(Some(AppStatusFilter::Enabled)) .await .context("failed to get installed hApps")?, ); - - trace!("running_happs {:?}", running_happs); + let enabled_happ_ids: Vec<&String> = enabled_happs + .iter() + .map(|h| &h.installed_app_id) + .unique() + .collect(); + trace!("enabled_happs {:?}", enabled_happ_ids); let client = reqwest::Client::new(); @@ -90,34 +276,45 @@ pub async fn install_holo_hosted_happs( categories: _, } in happs { - // Check if special happ is installed and do nothing if it is installed trace!("Trying to install {}", happ_id); + + // Currently, the Hosted HoloFuel and Cloud Console happs should have a `special_installed_app_id`. + // If happ has a `special_installed_app_id`, the happ relies on the core-app for dna calls. + // In this case we only need to confirm that the hosted happ has an enabled sl instance.
+ // If it does have a running SL, we consider the app ready for use and do nothing + // ...otherwise, we proceed to install, which leads to the installation of a sl instance for this happ if special_installed_app_id.is_some() - && running_happs.contains(&format!("{}::servicelogger", happ_id)) + && enabled_happ_ids.contains(&&format!("{}::servicelogger", happ_id)) { - // We do not need to install bc we never pause this app as we do not want our core-app to be uninstalled ever + // Skip the install/enable step + // NB: We expect our core-app to already be installed and enabled as we never pause/disable/uninstall it trace!( "Special App {:?} already installed", special_installed_app_id ); } - // Check if happ is already installed and disable it if the publisher has paused happ in hha - // NB: This condition/check will miss hosted holofuel as that happ is never installed under its happ_id - // This means it will always try and fail to install holofuel again - // Right now, we don't care - else if running_happs.contains(&format!("{}", happ_id)) { + // Iterate through all currently enabled apps + // (NB: The sole exceptions here are Hosted HoloFuel and Cloud Console, as they should always be caught by the prior condition.) + else if enabled_happ_ids.contains(&&format!("{}", happ_id)) { trace!("App {} already installed", happ_id); + // Check if this happ was paused by the publisher in hha and disable it in holochain if so if *is_paused { - trace!("Pausing {}", happ_id); + trace!( + "Found paused happ in holo {} - disabling happ on holochain conductor.", + happ_id + ); admin_websocket.disable_app(&happ_id.to_string()).await?; } else { - // Check if installed happ is eligible to be enabled for host and enable, if so + // Check if happ is eligible to be holo-enabled for host and if so, holo-enable happ if is_kyc_level_2 { - trace!("Enabling {}", happ_id); - admin_websocket.enable_app(&happ_id.to_string()).await?; + trace!("Enabling happ {} for holo hosting", happ_id); + let holoport_id = get_holoport_id().await?; + core_app_client + .holo_enable_happ(happ_id, &holoport_id) + .await?; } else { trace!( - "Not enabling installed {} app due to failed price check for kyc level", + "Not holo-enabling {} app due to failed price check for kyc level", happ_id ); } @@ -136,9 +333,9 @@ pub async fn install_holo_hosted_happs( "Skipping hosting of happ {} due to host's kyc level ", happ_id ); - } - // else install the hosted happ read-only instance - else { + } else { + // else, install the hosted happ read-only instance + // (NB: The read-only instance is an instance of the app that is installed with the host agent pubkey and a read-only memproof.)
trace!("Load mem-proofs for {}", happ_id); let mem_proof: HashMap = load_mem_proof_file(bundle_url).await.unwrap_or_default(); @@ -148,8 +345,27 @@ pub async fn install_holo_hosted_happs( mem_proof ); - // We'd like to move the logic from `install_hosted_happ` out of `hpos-api` and into this service where it belongs - let body = entries::InstallHappBody { + // Hardcoded servicelogger preferences for all the hosted happs installed + let preferences = HappPreferences { + max_fuel_before_invoice: Fuel::from_str("1000")?, // MAX_TX_AMT in holofuel is currently hard-coded to 50,000 + max_time_before_invoice: Duration::default(), + price_compute: Fuel::from_str("0.025")?, + price_storage: Fuel::from_str("0.025")?, + price_bandwidth: Fuel::from_str("0.025")?, + invoice_due_in_days: 7, + jurisdiction_prefs: None, + categories_prefs: None, + }; + + // The installation implementation can be found in`hpos-api` here: https://github.com/Holo-Host/hpos-api-rust/blob/develop/src/handlers/install/mod.rs#L31 + // NB: The `/install_hosted_happ` endpoint will holo-enable the app if it is already installed and enabled on hololchain, + // ...otherwise it takes the following 5 steps: + // 1. installs sl for the app, + // 2. holochain-enables the app's sl, + // 3. installs the app on holochain (NB: The app installs with the host agent pubkey as this a read-only instance), + // 4. holochain-enables the app, + // 5. holo-enables the app + let body = InstallHappBody { happ_id: happ_id.to_string(), preferences: preferences.clone(), membrane_proofs: mem_proof.clone(), @@ -159,459 +375,119 @@ pub async fn install_holo_hosted_happs( .json(&body) .send() .await?; - info!("Installed happ-id {}", happ_id); - trace!("Install happ Response {:?}", response); - - // If app was already installed but disabled, the above install will fail, and we just enable it here - let result = admin_websocket.enable_app(&happ_id.to_string()).await; - - trace!("Enable app result {:?}", result); + trace!("`/v2/apps/hosted/install` happ response {:?}", response); + info!("Installed and enabled happ-id {}", happ_id); } } Ok(()) } -/// Temporary read-only mem-proofs solution -/// should be replaced by calling the joining-code service and getting the appropriate proof for the agent -pub async fn load_mem_proof_file(bundle_url: &str) -> Result> { - let url = Url::parse(bundle_url)?; - - let path = download_file(&url).await?; - - let bundle = Bundle::read_from_file(&path).await?; - - let AppManifest::V1(manifest) = bundle.manifest(); - - Ok(manifest - .roles - .clone() - .iter() - .map(|role| { - ( - role.name.clone(), - Arc::new(SerializedBytes::from(UnsafeBytes::from(vec![0]))), - ) // The read only memproof is [0] (or in base64 `AA==`) - }) - .collect()) -} - -#[instrument(err, skip(url))] -pub(crate) async fn download_file(url: &Url) -> Result { - let path = if url.scheme() == "file" { - let p = PathBuf::from(url.path()); - trace!("Using: {:?}", p); - p - } else { - trace!("downloading"); - let mut url = Url::clone(url); - url.set_scheme("https") - .map_err(|_| anyhow!("failed to set scheme to https"))?; - let client = HttpClient::builder() - .redirect_policy(RedirectPolicy::Follow) - .build() - .context("failed to initiate download request")?; - let mut response = client - .get(url.as_str()) - .context("failed to send GET request")?; - if !response.status().is_success() { - return Err(anyhow!( - "response status code {} indicated failure", - response.status().as_str() - )); - } - let dir = TempDir::new().context("failed to create tempdir")?; - 
let url_path = PathBuf::from(url.path()); - let basename = url_path - .file_name() - .context("failed to get basename from url")?; - let path = dir.into_path().join(basename); - let mut file = fs::File::create(&path).context("failed to create target file")?; - response - .copy_to(&mut file) - .context("failed to write response to file")?; - trace!("download successful"); - path - }; - Ok(path) -} - -pub async fn get_all_published_hosted_happs( - core_app_client: &mut HHAAgent, -) -> Result> { - trace!("get_all_published_hosted_happs"); - - let happ_bundles: Vec = core_app_client - .app - .zome_call_typed( - CoreAppRoleName::HHA.into(), - ZomeName::from("hha"), - FunctionName::from("get_happs"), - (), - ) - .await?; - - let happ_bundle_ids = happ_bundles - .into_iter() - .map(|happ| { - trace!( - "{} with happ-id: {:?} and bundle: {}, is-paused={}", - happ.name, - happ.id, - happ.bundle_url, - happ.is_paused - ); - HappBundle { - happ_id: happ.id, - bundle_url: happ.bundle_url, - is_paused: happ.is_paused, - is_host_disabled: happ.host_settings.is_host_disabled, - special_installed_app_id: happ.special_installed_app_id, - jurisdictions: happ.jurisdictions, - exclude_jurisdictions: happ.exclude_jurisdictions, - categories: happ.categories, - } - }) - .collect(); - - trace!("got happ bundles"); - Ok(happ_bundle_ids) -} - -pub async fn get_pending_transactions( - core_app_client: &mut HHAAgent, -) -> Result { - let pending_transactions: PendingTransaction = core_app_client - .app - .zome_call_typed( - CoreAppRoleName::Holofuel.into(), - ZomeName::from("transactor"), - FunctionName::from("get_pending_transactions"), - (), - ) - .await?; - - trace!("got pending transactions"); - Ok(pending_transactions) -} - -/// Ineligible Happs = old holo-hosted happs, holo-disabled happs, or happs with invalid pricing for kyc level +/// Ineligible Happs = old holo-hosted happs, holo-disabled happs, or happs with one of the following: +/// - 1. an invalid pricing for kyc level, 2. invalid pricing preference, 3. invalid uptime, or 4. 
invalid jurisdiction /// Handles ineligible happs for 2 cases - identified and anonymous hosted agents: /// - Identified: Uninstalls & removes identified instances of ineligible happs /// - Anonymous: Disables anonymous instance of ineligible happs -pub async fn uninstall_ineligible_happs( - config: &Config, - published_happs: &[HappBundle], - is_kyc_level_2: bool, +pub async fn handle_ineligible_happs( + core_app_client: &mut HHAAgent, + admin_port: u16, suspended_happs: Vec, - jurisdiction: Option, - hosting_preferences: HostingPreferences, - publisher_jurisdictions: HashMap>, + host_credentials: HostCredentials, + host_happ_preferences: HappPreferences, + published_happ_details: HashMap, ) -> Result<()> { info!("Checking to uninstall happs that were removed from the hosted list...."); - let mut admin_websocket = AdminWebsocket::connect(config.admin_port) + let mut happs_to_holo_disable = HashSet::new(); + + let mut admin_websocket = AdminWebsocket::connect(admin_port) .await .context("Failed to connect to holochain's admin interface")?; - let running_happ_ids = admin_websocket - .list_running_app() - .await - .context("Failed to get installed and running hApps")?; - - let unique_running_happ_ids: Vec<&String> = running_happ_ids.iter().unique().collect(); - - trace!("Unique_running_happ_ids {:?}", unique_running_happ_ids); - - for happ_id in unique_running_happ_ids { - if should_be_installed( - happ_id, - published_happs, - is_kyc_level_2, - suspended_happs.clone(), - jurisdiction.clone(), - hosting_preferences.clone(), - publisher_jurisdictions.clone(), - ) + let enabled_happs = admin_websocket + .list_apps(Some(AppStatusFilter::Enabled)) .await - { - info!( - "Skipping uninstall of {} as it should remain installed", - happ_id - ); - continue; - } - - if is_anonymous_instance(happ_id) { - info!("Disabling {}", happ_id); - admin_websocket.disable_app(happ_id).await?; - } else { - info!("Uninstalling {}", happ_id); - admin_websocket.uninstall_app(happ_id).await?; - } - } - info!("Done uninstalling happs that were removed from the hosted list."); - - Ok(()) -} - -// There are core infrastructure happs that should never be uninstalled. All uninstallable happs start with "uhCkk" and don't contain ::servicelogger -fn is_hosted_happ(app: &str) -> bool { - app.starts_with("uhCkk") && !app.contains("::servicelogger") -} + .context("Failed to get installed and enabled hApps")?; -fn is_anonymous_instance(happ_id: &str) -> bool { - happ_id.starts_with("uhCkk") && happ_id.len() == 53 -} - -/// Returns true if `installed_app_id` represents an anonymous or identified instance of `happ_id` -fn is_instance_of_happ(happ_id: &str, installed_app_id: &str) -> bool { - // An `installed_app_id` is one of - // - A core hApp (e.g. 
`servicelogger:0_2_1::251e7cc8-9c48-4841-9eb0-435f0bf97373`) - // - An anonymous instance with installed_app_id == happ_id - // - An identified instance matching /happ_id::agent_id/ - // - A happ-specific servicelogger instance matching /happ_id::servicelogger/ - installed_app_id.starts_with(happ_id) && !installed_app_id.ends_with("servicelogger") -} - -pub async fn should_be_installed( - running_happ_id: &String, - published_happs: &[HappBundle], - is_kyc_level_2: bool, - suspended_happs: Vec, - jurisdiction: Option, - hosting_preferences: HostingPreferences, - publisher_jurisdictions: HashMap>, -) -> bool { - trace!("`should_be_installed check` for {}", running_happ_id); - // This should be the first check since the core-app should never be uninstalled currently - if !is_hosted_happ(running_happ_id) { - trace!("Keeping infrastructure happ {}", running_happ_id); - return true; - } - - // checks if published happ is still running - let published_happ = published_happs + let enabled_happ_ids: Vec<&String> = enabled_happs .iter() - .find(|&happ| happ.happ_id.to_string() == *running_happ_id); - - if suspended_happs.contains(running_happ_id) { - trace!("Disabling suspended happ {}", running_happ_id); - return false; - } - - if let Some(jurisdiction_preferences) = hosting_preferences.jurisdiction_prefs { - let publisher_jurisdiction = publisher_jurisdictions.get(running_happ_id); - match publisher_jurisdiction { - Some(jurisdiction) => match jurisdiction { - Some(jurisdiction) => { - let mut is_jurisdiction_in_list = false; - if jurisdiction_preferences - .value - .iter() - .any(|host_jurisdiction| *host_jurisdiction == *jurisdiction) - { - is_jurisdiction_in_list = true; - } - if jurisdiction_preferences.is_exclusion && is_jurisdiction_in_list { - return false; - } - if !jurisdiction_preferences.is_exclusion && !is_jurisdiction_in_list { - return false; - } - } - _ => { - warn!("could not get publisher jurisdiction"); - warn!("happ {} won't be installed", running_happ_id); - return false; - } - }, - _ => { - warn!("could not get publisher jurisdiction"); - warn!("happ {} won't be installed", running_happ_id); - return false; - } - } - } - - // verify the hApp is allowed to run on this jurisdiction. 
- // jurisdiction is taken from mongodb and compared against hApps jurisdictions - match jurisdiction { - Some(jurisdiction) => { - if let Some(happ) = published_happ { - let mut is_jurisdiction_in_list = false; - if let Some(_happ_jurisdiction) = happ - .jurisdictions - .iter() - .find(|&happ_jurisdiction| *happ_jurisdiction == jurisdiction) - { - is_jurisdiction_in_list = true; - } - if happ.exclude_jurisdictions && is_jurisdiction_in_list { - return false; - } - if !happ.exclude_jurisdictions && !is_jurisdiction_in_list { - return false + .map(|h| &h.installed_app_id) + .unique() + .collect(); + trace!("enabled_happ_ids {:?}", enabled_happ_ids); + + let published_happ_ids: Vec<String> = published_happ_details.clone().into_keys().collect(); + trace!("published_happ_ids {:?}", published_happ_ids); + + for enabled_happ_id in enabled_happ_ids { + // Determine if the enabled happ is an instance of a published happ + let maybe_hosted_instance_happ_id = published_happ_ids + .clone() + .into_iter() + .find(|published_happ_id| is_instance_of_happ(published_happ_id, enabled_happ_id)); + + let should_happ_remain_enabled = match maybe_hosted_instance_happ_id { + Some(happ_id) => { + trace!("Found hosted happ instance {:?}", &happ_id); + + let should_remain_enabled = should_be_enabled( + &enabled_happ_id.to_string(), + happ_id.clone(), + suspended_happs.clone(), + host_credentials.clone(), + host_happ_preferences.clone(), + published_happ_details.clone(), + ) + .await; + + if !should_remain_enabled { + happs_to_holo_disable.insert(happ_id); - } - } - } - None => { - warn!("jurisdiction not available for holoport"); - warn!("happ {} won't be installed", running_happ_id); - return false; - } - } - - if let Some(categories_preferences) = hosting_preferences.categories_prefs { - // verify the happ matches the hosting categories preferences - if let Some(happ) = published_happ { - let categories_list: HashSet = - categories_preferences.value.iter().cloned().collect(); - let contains_category = happ - .categories - .iter() - .any(|category| categories_list.contains(category)); - - if contains_category && categories_preferences.is_exclusion { - return false; + } + should_remain_enabled } - if !contains_category && !categories_preferences.is_exclusion { - return false; + None => { + // Filter out the infrastructure apps (ie: the core apps) + if !is_hosted_happ(enabled_happ_id) { + trace!("Keeping infrastructure happ {}", enabled_happ_id); + true + } else { + // The enabled happ is not a hosted instance of the happ nor a core app, so it shouldn't remain installed/enabled + false + } } - } - } + }; - // The running happ is an instance of an expected happ - let expected_happ = published_happs.iter().find(|published_happ| { - is_instance_of_happ(&published_happ.happ_id.to_string(), running_happ_id) - }); - - trace!( - "Found expected_happ {:?}", - &expected_happ.map(|eh| &eh.happ_id) - ); - - if let Some(expected_happ) = expected_happ { - // if the expected happ is disabled by the host, happ shouldn't be installed - if expected_happ.is_host_disabled { - trace!( - "Disabling happ {} because host was disabled it in hha", - expected_happ.happ_id + if should_happ_remain_enabled { + // If the happ should remain enabled, we leave the happ status unchanged and continue to the next happ + info!( + "Skipping disabling/uninstalling of {} as it should remain enabled", + enabled_happ_id ); - return false; - } - - // happ hosting is only valid (despite price prefs) if the host is >= kyc level 2 - is_kyc_level_2 - } else { - // The running
happ is not an instance of any expected happ, so shouldn't be installed - false - } -} - -pub async fn suspend_unpaid_happs( - core_app_client: &mut HHAAgent, - pending_transactions: PendingTransaction, -) -> Result> { - let mut suspended_happs: Vec = Vec::new(); - - let password = - env::var("DEVICE_SEED_DEFAULT_PASSWORD").expect("DEVICE_SEED_DEFAULT_PASSWORD is not set"); - let hpos_config_path = env::var("HPOS_CONFIG_PATH") - .expect("HPOS_CONFIG_PATH not found. please add the path to the environment variable"); - let holoport_id_output = Command::new("hpos-config-into-base36-id") - .arg("--config-path") - .arg(hpos_config_path) - .arg("--password") - .arg(password) - .output() - .expect("Failed to execute command"); - let holoport_id = String::from_utf8_lossy(&holoport_id_output.stdout); - - for invoice in &pending_transactions.invoice_pending { - if let Some(POS::Hosting(_)) = &invoice.proof_of_service { - if let Some(expiration_date) = invoice.expiration_date { - if expiration_date.as_millis() < Utc::now().timestamp_millis() { - if let Some(note) = invoice.note.clone() { - let invoice_note: Result = serde_yaml::from_str(¬e); - match invoice_note { - Ok(note) => { - let hha_id = note.hha_id; - suspended_happs.push(hha_id.clone().to_string()); - core_app_client - .app - .zome_call_typed( - CoreAppRoleName::HHA.into(), - ZomeName::from("hha"), - FunctionName::from("disable_happ"), - ExternIO::encode(HappAndHost { - happ_id: hha_id.clone(), - holoport_id: holoport_id.to_string(), - })?, - ) - .await?; - } - Err(e) => { - error!("Error parsing invoice note: {:?}", e); - } - } - } - } + continue; + } else { + // If apps should no longer remain enabled, we need to take two steps: + // Step 1: disable or uninstall app from Holochain Conductor (depending on instance type) + if is_anonymous_instance(enabled_happ_id) { + // Anonymous apps are only disabled, never uninstalled, as they are currently use a readonly instance of the host's instance of the app + info!("Holochain-disabling {}", enabled_happ_id); + admin_websocket.disable_app(enabled_happ_id).await?; + } else { + info!("Uninstalling {} from Holochain Conductor", enabled_happ_id); + admin_websocket.uninstall_app(enabled_happ_id).await?; } } } - debug!("suspend happs completed: {:?}", suspended_happs); - Ok(suspended_happs) -} - -pub async fn get_hosting_preferences(core_app_client: &mut HHAAgent) -> Result { - let hosting_preferences: HostingPreferences = core_app_client - .app - .zome_call_typed( - CoreAppRoleName::HHA.into(), - ZomeName::from("hha"), - FunctionName::from("get_default_happ_preferences"), - (), - ) - .await?; - - trace!("got hosting preferences"); - Ok(hosting_preferences) -} - -pub async fn get_happ_preferences( - core_app_client: &mut HHAAgent, - happ_id: ActionHashB64, -) -> Result { - let happ_preference: ServiceloggerHappPreferences = core_app_client - .app - .zome_call_typed( - CoreAppRoleName::HHA.into(), - ZomeName::from("hha"), - FunctionName::from("get_happ_preferences"), - happ_id, - ) - .await?; - - trace!("got happ preferences"); - Ok(happ_preference) -} + // Step 2: disable hosted happ in hha (holo hosting) + for happ_id in happs_to_holo_disable { + info!("Holo-disabling {}", happ_id); + let holoport_id = get_holoport_id().await?; + let happ_id_hash = ActionHashB64::from_b64_str(&happ_id)?; + core_app_client + .holo_disable_happ(&happ_id_hash, &holoport_id) + .await?; + } -pub async fn get_publisher_jurisdiction( - core_app_client: &mut HHAAgent, - pubkey: AgentPubKey, -) -> Result> { - let 
publisher_jurisdiction: Option = core_app_client - .app - .zome_call_typed( - CoreAppRoleName::HHA.into(), - ZomeName::from("hha"), - FunctionName::from("get_publisher_jurisdiction"), - pubkey, - ) - .await?; - - trace!("got publisher jurisdiction"); - Ok(publisher_jurisdiction) + info!("Done disabling/uninstalling all ineligible happs"); + Ok(()) }
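Reviewer note (not part of the patch): the same inclusion/exclusion rule is applied three times in this change - to the publisher's jurisdiction and the happ's categories in `HappPreferences`, and to the host's own jurisdiction in `HostCredentials::is_host_in_valid_jurisdiction`. The standalone sketch below is illustrative only; the `passes_list_filter` helper is hypothetical and not part of this crate. It shows the semantics the three checks are assumed to share: an exclusion list rejects listed values, an inclusion list rejects unlisted ones.

use std::collections::HashSet;

// Hypothetical helper mirroring the preference checks in this PR:
// when `is_exclusion` is true the value must NOT appear in the list,
// otherwise it MUST appear in the list.
fn passes_list_filter(value: &str, list: &HashSet<String>, is_exclusion: bool) -> bool {
    let in_list = list.contains(value);
    if is_exclusion { !in_list } else { in_list }
}

fn main() {
    let prefs: HashSet<String> = ["US".to_string(), "CA".to_string()].into();
    assert!(!passes_list_filter("US", &prefs, true));  // exclusion list: listed value is rejected
    assert!(!passes_list_filter("FR", &prefs, false)); // inclusion list: unlisted value is rejected
    assert!(passes_list_filter("CA", &prefs, false));  // inclusion list: listed value passes
    println!("list-filter semantics hold");
}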