Skip to content

Commit

Permalink
Merge pull request #7 from gierens/page-cache
Browse files Browse the repository at this point in the history
Add Page Cache

This adds the library functions page_get_minimal and page_get_updated_at, and the page::PageCache to the FUSE code. Fs uses those to cache pages based on their updated_at field and only retrieve the actually needed data from the API.

Resolves #2
  • Loading branch information
gierens authored Oct 1, 2023
2 parents 3188b44 + 7209754 commit fa0c8f3
Show file tree
Hide file tree
Showing 6 changed files with 295 additions and 7 deletions.
13 changes: 13 additions & 0 deletions gql/query/page/page_get_minimal.graphql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Fetch the minimal set of page fields (id, path, content, timestamps,
# editor, locale) for a single page — used by the FUSE page cache to
# populate and refresh cache entries.
query PageGetMinimal($id: Int!) {
pages {
single (id: $id) {
id
path
content
createdAt
updatedAt
editor
locale
}
}
}
7 changes: 7 additions & 0 deletions gql/query/page/page_get_updated_at.graphql
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Fetch only a page's updatedAt field — a cheap freshness probe the
# FUSE page cache uses to decide whether a cached page is stale.
query PageGetUpdatedAt($id: Int!) {
pages {
single (id: $id) {
updatedAt
}
}
}
28 changes: 21 additions & 7 deletions src/fuse/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use fuser::{
ReplyEntry, ReplyWrite, Request, TimeOrNow,
};
use libc::{EINVAL, EIO, EISDIR, ENOENT, O_TRUNC};
use wikijs::page::{Page, PageTreeItem, PageTreeMode};
use wikijs::page::{PageMinimal, PageTreeItem, PageTreeMode};
use wikijs::{Api, Credentials};

use chrono::DateTime;
Expand All @@ -19,9 +19,11 @@ use colored::Colorize;
#[allow(unused_imports)]
use log::{debug, error, info, trace, warn};

mod page;

#[allow(clippy::large_enum_variant)]
enum Inode {
Page(Page),
Page(PageMinimal),
Directory(Vec<PageTreeItem>),
}

Expand Down Expand Up @@ -110,18 +112,23 @@ impl From<u64> for InodeType {
// The FUSE filesystem backed by a wiki.js API.
struct Fs {
// API handle used for all wiki.js requests.
api: Api,
// Locale of the pages exposed by this mount.
locale: String,
// Cache of pages keyed by id, validated via their updatedAt field.
page_cache: page::PageCache,
}

impl Fs {
/// Create a new filesystem for the given API handle and locale,
/// starting with an empty page cache.
///
/// NOTE: the scraped diff contained both the old one-line body and the
/// new one; this is the post-merge version initializing `page_cache`.
pub fn new(api: Api, locale: String) -> Self {
    Self {
        api,
        locale,
        page_cache: page::PageCache::new(),
    }
}

fn get_inode(&self, ino: u64) -> Option<Inode> {
fn get_inode(&mut self, ino: u64) -> Option<Inode> {
match InodeType::from(ino) {
InodeType::Page(id) => {
debug!("get_inode: page {}", id);
match self.api.page_get(id) {
match self.page_cache.get(&self.api, id as u64) {
Ok(page) => Some(Inode::Page(page)),
Err(_) => None,
}
Expand Down Expand Up @@ -249,7 +256,11 @@ impl Filesystem for Fs {
if size < content.len() as u64 {
content.truncate(std::cmp::max(size as usize, 1));
}
match self.api.page_update_content(page.id, content) {
match self.page_cache.update_content(
&self.api,
page.id as u64,
content,
) {
Ok(_) => {
debug!("setattr: updated inode {}", ino);
let attr = match self.get_inode(ino) {
Expand Down Expand Up @@ -568,7 +579,10 @@ impl Filesystem for Fs {
}
debug!("write: inode {} from {} to {}", ino, offset, end);

match self.api.page_update_content(page.id, content) {
match self
.page_cache
.update_content(&self.api, page.id as u64, content)
{
Ok(_) => {
debug!("write: updated inode {}", ino);
reply.written(data.len() as u32);
Expand Down
62 changes: 62 additions & 0 deletions src/fuse/page.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
use std::collections::HashMap;
use wikijs::{page::PageError, page::PageMinimal, Api};

// In-memory cache of wiki pages keyed by page id, used by the FUSE
// layer to avoid refetching full page data on every inode lookup.
pub(crate) struct PageCache {
// Cached pages; entries are validated against the API's updatedAt
// field before being served (see `PageCache::get`).
pages: HashMap<u64, PageMinimal>,
}

#[allow(dead_code)]
impl PageCache {
    /// Create an empty page cache.
    pub(crate) fn new() -> Self {
        Self {
            pages: HashMap::new(),
        }
    }

    /// Get a page by id, serving it from the cache when it is still
    /// up to date.
    ///
    /// A cache hit still costs one cheap API call (the page's
    /// `updatedAt` field) to validate freshness; only on a miss or a
    /// stale entry is the full page fetched.
    ///
    /// # Arguments
    /// * `api` - The API handle used for freshness checks and fetches.
    /// * `id` - The id of the page to get.
    ///
    /// # Returns
    /// A Result containing either the page or a page error.
    pub(crate) fn get(
        &mut self,
        api: &Api,
        id: u64,
    ) -> Result<PageMinimal, PageError> {
        if let Some(page) = self.pages.get(&id) {
            // NOTE(review): assumes equal updated_at values mean the
            // page is unchanged — TODO confirm the format is stable.
            let updated_at = api.page_get_updated_at(id as i64)?;
            if updated_at == page.updated_at {
                return Ok(page.clone());
            }
        }
        // Miss or stale entry: fetch the full page and (re)populate
        // the cache. Delegating to refetch avoids duplicating the
        // fetch-and-insert logic.
        self.refetch(api, id)
    }

    /// Drop a page from the cache, if present.
    pub(crate) fn evict(&mut self, id: u64) {
        self.pages.remove(&id);
    }

    /// Fetch a page from the API unconditionally and store it in the
    /// cache, replacing any existing entry.
    pub(crate) fn refetch(
        &mut self,
        api: &Api,
        id: u64,
    ) -> Result<PageMinimal, PageError> {
        let page = api.page_get_minimal(id as i64)?;
        // insert() replaces any prior entry, so no separate remove()
        // is needed.
        self.pages.insert(id, page.clone());
        Ok(page)
    }

    /// Update a page's content through the API and refresh the cached
    /// copy so subsequent reads see the new content.
    pub(crate) fn update_content(
        &mut self,
        api: &Api,
        id: u64,
        content: String,
    ) -> Result<(), PageError> {
        api.page_update_content(id as i64, content)?;
        self.refetch(api, id)?;
        Ok(())
    }
}
38 changes: 38 additions & 0 deletions src/lib/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,44 @@ impl Api {
page::page_get(&self.client, &format!("{}/graphql", self.url), id)
}

/// Get the datetime at which a page was last updated.
///
/// # Arguments
/// * `id` - The id of the page to get the last update datetime of.
///
/// # Returns
/// A Result containing either the datetime string or a page error.
#[allow(dead_code)]
pub fn page_get_updated_at(
    &self,
    id: i64,
) -> Result<String, page::PageError> {
    // Resolve the graphql endpoint once, then delegate to the free
    // function in the page module.
    let endpoint = format!("{}/graphql", self.url);
    page::page_get_updated_at(&self.client, &endpoint, id)
}

/// Get the minimal information of a page.
///
/// # Arguments
/// * `id` - The id of the page to get the minimal information of.
///
/// # Returns
/// A Result containing either the minimal page information or a page error.
#[allow(dead_code)]
pub fn page_get_minimal(
    &self,
    id: i64,
) -> Result<page::PageMinimal, page::PageError> {
    // Resolve the graphql endpoint once, then delegate to the free
    // function in the page module.
    let endpoint = format!("{}/graphql", self.url);
    page::page_get_minimal(&self.client, &endpoint, id)
}

/// Get a page by its path.
///
/// # Arguments
Expand Down
154 changes: 154 additions & 0 deletions src/lib/page.rs
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,20 @@ pub struct Page {
pub creator_email: String,
}

// Minimal subset of a page's fields, matching the PageGetMinimal
// graphql query — enough for the FUSE layer to expose a page without
// fetching the full `Page` struct.
#[derive(Deserialize, Debug, Clone)]
#[allow(dead_code)]
pub struct PageMinimal {
pub id: Int,
pub path: String,
pub content: String,
// Serde renames map the API's camelCase field names to snake_case.
#[serde(rename = "createdAt")]
pub created_at: Date,
#[serde(rename = "updatedAt")]
pub updated_at: Date,
pub editor: String,
pub locale: String,
}

#[derive(Deserialize, Debug)]
pub struct PageListItem {
pub id: Int,
Expand Down Expand Up @@ -2310,3 +2324,143 @@ pub fn page_history_purge(
}
Err(classify_response_error(response_body.errors))
}

// Hand-written equivalent of the graphql_client derive output for the
// PageGetUpdatedAt operation (gql/query/page/page_get_updated_at.graphql).
pub(crate) mod page_get_updated_at {
use super::*;

// Marker type implementing GraphQLQuery for this operation.
pub struct PageGetUpdatedAt;

pub const OPERATION_NAME: &str = "PageGetUpdatedAt";
pub const QUERY : & str = "query PageGetUpdatedAt($id: Int!) {\n pages {\n single (id: $id) {\n updatedAt\n }\n }\n}\n" ;

// Query variables: the id of the page to probe.
#[derive(Serialize)]
pub struct Variables {
pub id: Int,
}

impl Variables {}

// Response shape mirrors the query: pages.single.updatedAt, with each
// level optional as the server may omit it on error.
#[derive(Deserialize)]
pub struct ResponseData {
pub pages: Option<Pages>,
}

#[derive(Deserialize)]
pub struct Pages {
pub single: Option<Single>,
}

#[derive(Deserialize)]
pub struct Single {
#[serde(rename = "updatedAt")]
pub updated_at: Date,
}

impl graphql_client::GraphQLQuery for PageGetUpdatedAt {
type Variables = Variables;
type ResponseData = ResponseData;
fn build_query(
variables: Self::Variables,
) -> ::graphql_client::QueryBody<Self::Variables> {
::graphql_client::QueryBody {
variables,
query: QUERY,
operation_name: OPERATION_NAME,
}
}
}
}

/// Query the API for just the `updatedAt` field of a single page.
///
/// # Arguments
/// * `client` - The HTTP client to use.
/// * `url` - The graphql endpoint URL.
/// * `id` - The id of the page.
///
/// # Returns
/// A Result containing either the update datetime or a page error.
pub fn page_get_updated_at(
    client: &Client,
    url: &str,
    id: i64,
) -> Result<Date, PageError> {
    let variables = page_get_updated_at::Variables { id };
    // Transport-level failures are wrapped into an unknown error
    // message; ? replaces the previous is_err()/unwrap() pattern.
    let response_body = post_graphql::<page_get_updated_at::PageGetUpdatedAt, _>(
        client, url, variables,
    )
    .map_err(|e| PageError::UnknownErrorMessage {
        message: e.to_string(),
    })?;

    // Walk the optional response levels; any missing level means the
    // query failed and response errors are classified instead.
    if let Some(single) = response_body
        .data
        .and_then(|data| data.pages)
        .and_then(|pages| pages.single)
    {
        return Ok(single.updated_at);
    }
    Err(classify_response_error(response_body.errors))
}

// Hand-written equivalent of the graphql_client derive output for the
// PageGetMinimal operation (gql/query/page/page_get_minimal.graphql).
pub(crate) mod page_get_minimal {
use super::*;

// Marker type implementing GraphQLQuery for this operation.
pub struct PageGetMinimal;

pub const OPERATION_NAME: &str = "PageGetMinimal";
pub const QUERY : & str = "query PageGetMinimal($id: Int!) {\n pages {\n single (id: $id) {\n id\n path\n content\n createdAt\n updatedAt\n editor\n locale\n }\n }\n}\n" ;
// Query variables: the id of the page to fetch.
#[derive(Serialize)]

pub struct Variables {
pub id: Int,
}

impl Variables {}

// Response shape mirrors the query; the leaf deserializes directly
// into the public PageMinimal struct.
#[derive(Deserialize)]
pub struct ResponseData {
pub pages: Option<Pages>,
}

#[derive(Deserialize)]
pub struct Pages {
pub(crate) single: Option<PageMinimal>,
}

impl graphql_client::GraphQLQuery for PageGetMinimal {
type Variables = Variables;
type ResponseData = ResponseData;
fn build_query(
variables: Self::Variables,
) -> ::graphql_client::QueryBody<Self::Variables> {
::graphql_client::QueryBody {
variables,
query: QUERY,
operation_name: OPERATION_NAME,
}
}
}
}

/// Query the API for a page's minimal field set.
///
/// # Arguments
/// * `client` - The HTTP client to use.
/// * `url` - The graphql endpoint URL.
/// * `id` - The id of the page.
///
/// # Returns
/// A Result containing either the minimal page information or a page
/// error.
pub fn page_get_minimal(
    client: &Client,
    url: &str,
    id: i64,
) -> Result<PageMinimal, PageError> {
    let variables = page_get_minimal::Variables { id };
    // Transport-level failures are wrapped into an unknown error
    // message; ? replaces the previous is_err()/unwrap() pattern.
    let response_body = post_graphql::<page_get_minimal::PageGetMinimal, _>(
        client, url, variables,
    )
    .map_err(|e| PageError::UnknownErrorMessage {
        message: e.to_string(),
    })?;

    // Walk the optional response levels; any missing level means the
    // query failed and response errors are classified instead.
    if let Some(single) = response_body
        .data
        .and_then(|data| data.pages)
        .and_then(|pages| pages.single)
    {
        return Ok(single);
    }
    Err(classify_response_error(response_body.errors))
}

0 comments on commit fa0c8f3

Please sign in to comment.