Skip to content

Commit

Permalink
async and sqlite cookie support
Browse files Browse the repository at this point in the history
  • Loading branch information
Piotr Czajka committed Sep 17, 2023
1 parent 4aef5c8 commit 2776fe4
Show file tree
Hide file tree
Showing 7 changed files with 238 additions and 112 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,5 @@
**/*.rs.bk
Cargo.lock
tags
cookies.sqlite-shm
cookies.sqlite-wal
5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "browsercookie-rs"
version = "0.1.1"
version = "0.2.0"
authors = ["Bharadwaj Machiraju <tunnelshade.in>", "Piotr Czajka <piotr.filip.czajka@gmail.com>"]
edition = "2018"
repository = "https://github.com/ginkooo/browsercookie-rs"
Expand Down Expand Up @@ -32,3 +32,6 @@ serde = { version = "1", features = ["derive"]}
serde_json = "1.0.39"
regex = "1"
clap = "2"
tokio = { version = "1", features = ["full"] }
sqlx = { version = "0.7.1", features = ["sqlite"] }
futures = "0.3.28"
15 changes: 10 additions & 5 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,18 @@ Using the library is quite simple
.. code-block:: rust
use browsercookie::{Browsercookies, Browser};
use browsercookie::{CookieFinder, Browser, Attribute};
let mut bc = Browsercookies::new();
let domain_regex = Regex::new("google.com").unwrap();
let mut cookie_jar = CookieFinder::builder()
.with_regexp(Regex::new("google.com").unwrap(), Attribute::Domain)
.with_browser(Browser::Firefox)
.build()
.find()
.await.unwrap();
bc.from_browser(Browser::Firefox, &domain_regex).expect("Failed to get cookies from firefox");
println!("Cookie header string: Cookie: {}", bc.to_header(domain_regex));
let cookie = cookie_jar.get("some_cookie_name").unwrap();
println!("Cookie header string: Cookie: {}", cookie);
A more complete example can be found in `browsercookies <src/bin.rs>`_.

Expand Down
114 changes: 66 additions & 48 deletions src/bin.rs
Original file line number Diff line number Diff line change
@@ -1,70 +1,88 @@
use browsercookie::{Attribute, Browser, CookieFinder};
use clap::{App, Arg};
use regex::Regex;
use clap::{Arg, App};
use browsercookie::{Browser, Browsercookies};

#[macro_use]
extern crate clap;

fn curl_output(bc: &Browsercookies, domain_regex: &Regex) {
print!("Cookie: {}", bc.to_header(domain_regex).unwrap());
/// Print the most recently found cookie as a curl-style `Cookie:` header.
///
/// Panics with "Cookie not found" if the finder matched no cookies.
async fn curl_output(cookie_finder: &CookieFinder) {
    let jar = cookie_finder.find().await;
    let latest = jar.iter().last().expect("Cookie not found");
    print!("Cookie: {}", latest);
}

fn python_output(bc: &Browsercookies, domain_regex: &Regex) {
print!("{{'Cookie': '{}'}}", bc.to_header(domain_regex).unwrap());
/// Print the most recently found cookie as a Python dict literal,
/// e.g. `{'Cookie': 'name=value'}`.
///
/// Panics with "Cookie not found" if the finder matched no cookies.
async fn python_output(cookie_finder: &CookieFinder) {
    let jar = cookie_finder.find().await;
    let latest = jar.iter().last().expect("Cookie not found");
    print!("{{'Cookie': '{}'}}", latest);
}

fn main() {
#[tokio::main]
async fn main() {
let matches = App::new("browsercookies")
.version(crate_version!())
.author(crate_authors!())
.about(crate_description!())
.arg(Arg::with_name("domain")
.short("d")
.long("domain")
.value_name("DOMAIN_REGEX")
.required(true)
.help("Sets a domain filter for cookies")
.takes_value(true))
.arg(Arg::with_name("browser")
.short("b")
.long("browser")
.value_name("BROWSER")
.multiple(true)
.default_value("firefox")
.help("Accepted values: firefox (only one can be provided)")
.takes_value(true))
.arg(Arg::with_name("name")
.short("n")
.long("name")
.conflicts_with("output")
.value_name("COOKIE_NAME")
.help("Specify a cookie name to output only that value")
.takes_value(true))
.arg(Arg::with_name("output")
.short("o")
.long("output")
.value_name("OUTPUT_FORMAT")
.help("Accepted values: curl,python (only one can be provided)")
.default_value("curl")
.takes_value(true))
.get_matches();
.version(crate_version!())
.author(crate_authors!())
.about(crate_description!())
.arg(
Arg::with_name("domain")
.short("d")
.long("domain")
.value_name("DOMAIN_REGEX")
.required(true)
.help("Sets a domain filter for cookies")
.takes_value(true),
)
.arg(
Arg::with_name("browser")
.short("b")
.long("browser")
.value_name("BROWSER")
.multiple(true)
.default_value("firefox")
.help("Accepted values: firefox (only one can be provided)")
.takes_value(true),
)
.arg(
Arg::with_name("name")
.short("n")
.long("name")
.conflicts_with("output")
.value_name("COOKIE_NAME")
.help("Specify a cookie name to output only that value")
.takes_value(true),
)
.arg(
Arg::with_name("output")
.short("o")
.long("output")
.value_name("OUTPUT_FORMAT")
.help("Accepted values: curl,python (only one can be provided)")
.default_value("curl")
.takes_value(true),
)
.get_matches();

let mut bc = Browsercookies::new();
let domain_regex = Regex::new(matches.value_of("domain").unwrap()).unwrap();

for b in matches.values_of("browser").unwrap() {
if b == "firefox" {
bc.from_browser(Browser::Firefox, &domain_regex).expect("Failed to get cookies from firefox");
let mut builder = CookieFinder::builder().with_regexp(domain_regex, Attribute::Domain);

for b in matches.values_of("browser").unwrap() {
if b == "firefox" {
builder = builder.with_browser(Browser::Firefox);
}
}

if let Some(cookie_name) = matches.value_of("name") {
print!("{}", bc.cj.get(cookie_name).expect("Cookie not present").value());
builder.build().find().await.iter().for_each(|c| {
if c.name() == cookie_name {
println!("{}", c.value());
}
});
} else {
match matches.value_of("output").unwrap() {
"curl" => curl_output(&bc, &domain_regex),
"python" => python_output(&bc, &domain_regex),
_ => ()
"curl" => curl_output(&builder.build()).await,
"python" => python_output(&builder.build()).await,
_ => (),
}
}
}
100 changes: 81 additions & 19 deletions src/firefox.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,22 @@ use byteorder::{LittleEndian, ReadBytesExt};
use cookie::{Cookie, CookieJar};
#[allow(unused_imports)]
use dirs::home_dir;
use futures::TryStreamExt;
use ini::Ini;
use lz4::block::decompress;
use memmap::MmapOptions;
use regex::Regex;
use serde_json::Value;
use sqlx::prelude::*;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::SqliteConnection;
use std::error::Error;
use std::fs::File;
use std::io::Cursor;
use std::path::{Path, PathBuf};

use crate::errors::BrowsercookieError;
use crate::Attribute;

#[allow(non_snake_case)]
#[derive(Deserialize, Debug)]
Expand Down Expand Up @@ -80,7 +85,7 @@ fn get_default_profile_path(master_profile: &Path) -> Result<PathBuf, Box<dyn Er

for (sec, _) in &profiles_conf {
let section = profiles_conf
.section(sec.clone())
.section(sec)
.ok_or("Invalid profile section")?;
match section.get("Default").and(section.get("Path")) {
Some(path) => {
Expand All @@ -93,11 +98,44 @@ fn get_default_profile_path(master_profile: &Path) -> Result<PathBuf, Box<dyn Er
Ok(default_profile_path)
}

fn load_from_recovery(
/// Load cookies from a Firefox `cookies.sqlite` database into `cookie_jar`.
///
/// The database is opened read-only and immutable — presumably so a live
/// Firefox instance holding the -wal/-shm files does not block us (TODO:
/// confirm). Every row of `moz_cookies` whose host matches the supplied
/// regex is added to the jar with default path/security attributes.
///
/// NOTE(review): only the `Regex` half of `domain_regex` is consulted; the
/// `Attribute` is ignored here, so the pattern is always matched against
/// the cookie host — confirm this is intended.
///
/// # Errors
/// Returns an error if the database cannot be opened or a row cannot be
/// fetched.
async fn load_from_sqlite(
    sqlite_path: &Path,
    cookie_jar: &mut CookieJar,
    domain_regex: &(Regex, Attribute),
) -> Result<(), Box<dyn Error>> {
    let options = SqliteConnectOptions::new()
        .filename(sqlite_path)
        .read_only(true)
        .immutable(true);
    // Propagate connection failures with `?` instead of panicking: this
    // function already returns a Result, so callers can recover.
    let mut conn = SqliteConnection::connect_with(&options).await?;
    let mut query = sqlx::query("SELECT name, value, host from moz_cookies").fetch(&mut conn);

    while let Some(row) = query.try_next().await? {
        let name: String = row.get(0);
        let value: String = row.get(1);
        let host: String = row.get(2);

        if domain_regex.0.is_match(&host) {
            cookie_jar.add(
                Cookie::build(name, value)
                    .domain(host)
                    .path("/")
                    .secure(false)
                    .http_only(false)
                    .finish(),
            );
        }
    }
    Ok(())
}

async fn load_from_recovery(
recovery_path: &Path,
bcj: &mut Box<CookieJar>,
domain_regex: &Regex,
) -> Result<bool, Box<dyn Error>> {
cookie_jar: &mut CookieJar,
regex_and_attribute: &(Regex, Attribute),
) -> Result<(), Box<dyn Error>> {
let recovery_file = File::open(recovery_path)?;
let recovery_mmap = unsafe { MmapOptions::new().map(&recovery_file)? };

Expand Down Expand Up @@ -126,8 +164,8 @@ fn load_from_recovery(
serde_json::from_value(c.clone()) as Result<MozCookie, serde_json::error::Error>
{
// println!("Loading for {}: {}={}", cookie.host, cookie.name, cookie.value);
if domain_regex.is_match(&cookie.host) {
bcj.add(
if regex_and_attribute.0.is_match(&cookie.host) {
cookie_jar.add(
Cookie::build(cookie.name, cookie.value)
.domain(cookie.host)
.path(cookie.path)
Expand All @@ -138,11 +176,14 @@ fn load_from_recovery(
}
}
}
Ok(true)
Ok(())
}

pub(crate) fn load(bcj: &mut Box<CookieJar>, domain_regex: &Regex) -> Result<(), Box<dyn Error>> {
// Returns a CookieJar on heap if following steps go right
pub(crate) async fn load(
cookie_jar: &mut CookieJar,
regex_and_attribute: &(Regex, Attribute),
) -> Result<(), Box<dyn Error>> {
// Returns a CookieJar if following steps go right
//
// 1. Get default profile path for firefox from master ini profiles config.
// 2. Load cookies from recovery json (sessionstore-backups/recovery.jsonlz4)
Expand All @@ -156,16 +197,19 @@ pub(crate) fn load(bcj: &mut Box<CookieJar>, domain_regex: &Regex) -> Result<(),

let profile_path = get_default_profile_path(&master_profile_path)?;

let mut recovery_path = profile_path;
let mut recovery_path = profile_path.clone();
recovery_path.push("sessionstore-backups/recovery.jsonlz4");

if !recovery_path.exists() {
return Err(Box::new(BrowsercookieError::InvalidCookieStore(
String::from("Firefox invalid cookie store"),
)));
if recovery_path.exists() {
load_from_recovery(&recovery_path, cookie_jar, regex_and_attribute).await?;
}

load_from_recovery(&recovery_path, bcj, domain_regex)?;
let mut sqlite_path = profile_path.clone();

if sqlite_path.exists() {
sqlite_path.push("cookies.sqlite");
load_from_sqlite(&sqlite_path, cookie_jar, regex_and_attribute).await?;
}

Ok(())
}
Expand All @@ -174,14 +218,15 @@ pub(crate) fn load(bcj: &mut Box<CookieJar>, domain_regex: &Regex) -> Result<(),
mod tests {
use super::*;

#[test]
fn test_recovery_load() {
#[tokio::test]
async fn test_recovery_load() {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("tests/resources/recovery.jsonlz4");
let mut bcj = Box::new(CookieJar::new());

let domain_re = Regex::new(".*").unwrap();
load_from_recovery(&path, &mut bcj, &domain_re)
load_from_recovery(&path, &mut bcj, &(domain_re, Attribute::Domain))
.await
.expect("Failed to load from firefox recovery json");

let c = bcj
Expand All @@ -195,6 +240,23 @@ mod tests {
assert_eq!(c.domain(), Some("addons.mozilla.org"));
}

/// Verify that cookies are read from the fixture profile's
/// `cookies.sqlite` and land in the jar with the expected attributes.
#[tokio::test]
async fn test_sqlite_load() {
    let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    path.push("tests/resources/Profiles/1qbuu7ux.default/cookies.sqlite");

    let mut jar = Box::new(CookieJar::new());
    let match_all = Regex::new(".*").unwrap();
    load_from_sqlite(&path, &mut jar, &(match_all, Attribute::Domain))
        .await
        .unwrap();

    let cookie = jar.get("somename").unwrap();
    assert_eq!(cookie.value(), "somevalue");
    assert_eq!(cookie.path(), Some("/"));
    assert_eq!(cookie.domain(), Some("somehost"));
}

#[test]
fn test_master_profile() {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
Expand Down
Loading

0 comments on commit 2776fe4

Please sign in to comment.