
Feat: add firebase_sync bin to periodically sync events with aw-firebase-leaderboard #488

Draft · wants to merge 21 commits into master
449 changes: 416 additions & 33 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -7,5 +7,6 @@ members = [
"aw-server",
"aw-sync",
"aw-query",
"aw-firebase-sync",
]
resolver = "2"
6 changes: 6 additions & 0 deletions aw-client-rust/src/blocking.rs
@@ -62,6 +62,12 @@ impl AwClient {
stop: Option<DateTime<Utc>>,
limit: Option<u64>
);
proxy_method!(
query,
Vec<serde_json::Value>,
query: &str,
timeperiods: Vec<(DateTime<Utc>, DateTime<Utc>)>
);
proxy_method!(insert_event, (), bucketname: &str, event: &Event);
proxy_method!(insert_events, (), bucketname: &str, events: Vec<Event>);
proxy_method!(
28 changes: 27 additions & 1 deletion aw-client-rust/src/lib.rs
@@ -11,7 +11,7 @@ use std::vec::Vec;
use std::{collections::HashMap, error::Error};

use chrono::{DateTime, Utc};
use serde_json::Map;
use serde_json::{json, Map};

pub use aw_models::{Bucket, BucketMetadata, Event};

@@ -98,6 +98,32 @@ impl AwClient {
Ok(())
}

pub async fn query(
&self,
query: &str,
timeperiods: Vec<(DateTime<Utc>, DateTime<Utc>)>,
) -> Result<Vec<serde_json::Value>, reqwest::Error> {
let url = reqwest::Url::parse(format!("{}/api/0/query", self.baseurl).as_str()).unwrap();

// Format timeperiods as ISO8601 strings, separated by /
let timeperiods_str: Vec<String> = timeperiods
.iter()
.map(|(start, stop)| (start.to_rfc3339(), stop.to_rfc3339()))
.map(|(start, stop)| format!("{}/{}", start, stop))
.collect();
// Result is a sequence, one element per timeperiod
self.client
.post(url)
.json(&json!({
"query": query.split('\n').collect::<Vec<&str>>(),
"timeperiods": timeperiods_str,
}))
.send()
.await?
.json()
.await
}

pub async fn get_events(
&self,
bucketname: &str,
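For illustration, a minimal sketch of how the new async `query` method could be called from client code (not part of this diff); the client name, bucket, and time range are hypothetical, assuming a local aw-server on port 5600:

```rust
use aw_client_rust::AwClient;
use chrono::{DateTime, Utc};

async fn query_example() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to a locally running aw-server.
    let client = AwClient::new("localhost", 5600, "query-example")?;

    // A query in the ActivityWatch query language; the bucket name is hypothetical.
    let query = "events = query_bucket(\"aw-watcher-window_myhost\");\nRETURN = events;";

    // One timeperiod in, one result element out.
    let start: DateTime<Utc> = "2024-01-01T00:00:00Z".parse()?;
    let end: DateTime<Utc> = "2024-01-02T00:00:00Z".parse()?;
    let results = client.query(query, vec![(start, end)]).await?;
    println!("{:?}", results[0]);
    Ok(())
}
```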
28 changes: 21 additions & 7 deletions aw-client-rust/tests/test.rs
@@ -86,19 +86,17 @@ mod test {
println!("Buckets: {buckets:?}");
let mut event = Event {
id: None,
timestamp: DateTime::from_utc(
timestamp: DateTime::from_naive_utc_and_offset(
DateTime::parse_from_rfc3339("2017-12-30T01:00:00+00:00")
.unwrap()
.naive_utc(),
Utc,
),
duration: Duration::seconds(0),
.naive_utc(), Utc),
duration: Duration::try_seconds(0).unwrap(),
data: Map::new(),
};
println!("{event:?}");
client.insert_event(&bucketname, &event).unwrap();
// Ugly way to create a UTC from timestamp, see https://github.com/chronotope/chrono/issues/263
event.timestamp = DateTime::from_utc(
event.timestamp = DateTime::from_naive_utc_and_offset(
DateTime::parse_from_rfc3339("2017-12-30T01:00:01+00:00")
.unwrap()
.naive_utc(),
@@ -108,7 +106,23 @@

let events = client.get_events(&bucketname, None, None, None).unwrap();
println!("Events: {events:?}");
assert!(events[0].duration == Duration::seconds(1));
assert!(events[0].duration == Duration::try_seconds(1).unwrap());

// Query
let query = format!(
"events = query_bucket(\"{}\");
RETURN = events;",
bucket.id
);
let start: DateTime<Utc> = DateTime::parse_from_rfc3339("1996-12-19T00:00:00-08:00")
.unwrap()
.into();
let end: DateTime<Utc> = DateTime::parse_from_rfc3339("2020-12-19T00:00:00-08:00")
.unwrap()
.into();
let timeperiods = (start, end);
let query_result = client.query(&query, vec![timeperiods]).unwrap();
println!("Query result: {query_result:?}");

client
.delete_event(&bucketname, events[0].id.unwrap())
2 changes: 1 addition & 1 deletion aw-datastore/src/worker.rs
@@ -177,7 +177,7 @@ impl DatastoreWorker {
response_sender.respond(response);

let now: DateTime<Utc> = Utc::now();
let commit_interval_passed: bool = (now - last_commit_time) > Duration::seconds(15);
let commit_interval_passed: bool = (now - last_commit_time) > Duration::try_seconds(15).unwrap();
if self.commit
|| commit_interval_passed
|| self.uncommitted_events > 100
32 changes: 16 additions & 16 deletions aw-datastore/tests/datastore.rs
@@ -77,7 +77,7 @@ mod datastore_tests {
Some(created) => {
let now = Utc::now();
assert!(created <= now);
assert!(created > now - Duration::seconds(10));
assert!(created > now - Duration::try_seconds(10).unwrap());
}
};

@@ -102,7 +102,7 @@
Some(created) => {
let now = Utc::now();
assert!(created <= now);
assert!(created > now - Duration::seconds(10));
assert!(created > now - Duration::try_seconds(10).unwrap());
}
};

@@ -129,7 +129,7 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e2 = e1.clone();
@@ -157,7 +157,7 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e2 = e1.clone();
@@ -224,16 +224,16 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(100),
duration: Duration::try_seconds(100).unwrap(),
data: json_map! {"key": json!("value")},
};

let event_list = [e1];
ds.insert_events(&bucket.id, &event_list).unwrap();

info!("Get event that covers queried timeperiod");
let query_start = now + Duration::seconds(1);
let query_end = query_start + Duration::seconds(1);
let query_start = now + Duration::try_seconds(1).unwrap();
let query_end = query_start + Duration::try_seconds(1).unwrap();
let fetched_events_limit = ds
.get_events(&bucket.id, Some(query_start), Some(query_end), Some(1))
.unwrap();
@@ -256,11 +256,11 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e2 = e1.clone();
e2.timestamp += Duration::seconds(1);
e2.timestamp += Duration::try_seconds(1).unwrap();

let event_list = [e1.clone(), e2.clone()];

@@ -308,7 +308,7 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e2 = e1.clone();
@@ -334,14 +334,14 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e2 = e1.clone();
e2.timestamp += Duration::seconds(1);
e2.timestamp += Duration::try_seconds(1).unwrap();

let mut e_diff_data = e2.clone();
e_diff_data.timestamp += Duration::seconds(1);
e_diff_data.timestamp += Duration::try_seconds(1).unwrap();
e_diff_data.data = json_map! {"key": json!("other value")};

// First event
@@ -358,7 +358,7 @@
let fetched_events = ds.get_events(&bucket.id, None, None, None).unwrap();
assert_eq!(fetched_events.len(), 1);
assert_eq!(fetched_events[0].timestamp, e1.timestamp);
assert_eq!(fetched_events[0].duration, Duration::seconds(1));
assert_eq!(fetched_events[0].duration, Duration::try_seconds(1).unwrap());
assert_eq!(fetched_events[0].data, e1.data);
assert_eq!(fetched_events[0].id, e1.id);
let e2 = &fetched_events[0];
@@ -383,7 +383,7 @@
let e = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
let mut e1 = e.clone();
@@ -451,7 +451,7 @@
let e1 = Event {
id: None,
timestamp: Utc::now(),
duration: Duration::seconds(0),
duration: Duration::try_seconds(0).unwrap(),
data: json_map! {"key": json!("value")},
};
{
19 changes: 19 additions & 0 deletions aw-firebase-sync/Cargo.toml
@@ -0,0 +1,19 @@
[package]
name = "aw-firebase-sync"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
chrono = "0.4.35"
tokio = { version = "1", features = ["full"] }
aw-client-rust = { path = "../aw-client-rust" }
aw-models = { path = "../aw-models" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.70"
serde_yaml = "0.8.18"
reqwest = { version = "0.12.4" , features = ["json"] }
dirs = "5.0.1"
tracing = "0.1.26"
tracing-subscriber = "0.3.18"
10 changes: 10 additions & 0 deletions aw-firebase-sync/README.md
@@ -0,0 +1,10 @@
aw-firebase-sync
================

Firebase sync for ActivityWatch [leaderboard](https://github.com/ActivityWatch/aw-firebase-leaderboard).

This exports screentime data from ActivityWatch and uploads it to Firebase.

## Status

Still in early development, not ready for use yet.
2 changes: 2 additions & 0 deletions aw-firebase-sync/config.yaml
@@ -0,0 +1,2 @@
# you can get your api key from activitywatch leaderboard
apikey: your-api-key
114 changes: 114 additions & 0 deletions aw-firebase-sync/src/main.rs
@@ -0,0 +1,114 @@
use aw_client_rust::AwClient;
use chrono::Local;
use dirs::config_dir;
use reqwest;
use serde_json::{json, Value};
use serde_yaml;
use std::env;
use std::fs::{DirBuilder, File};
use std::io::prelude::*;
use tracing::info;
use tracing_subscriber;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
tracing_subscriber::fmt::init();
let args: Vec<String> = env::args().collect();
let mut port: u16 = 5600;
let mut testing = false;
if args.len() > 1 {
for idx in 1..args.len() {
if args[idx] == "--port" {
port = args[idx + 1].parse().expect("Invalid port number");
break;
}
if args[idx] == "--testing" {
testing = true;
}
if args[idx] == "--help" {
println!("Usage: aw-firebase-sync [--testing] [--port PORT] [--help]");
return Ok(());
}
}
}
let aw_client = AwClient::new("localhost", port, "aw-firebase-sync").unwrap();

let path = config_dir()
.map(|mut path| {
path.push("activitywatch");
path.push("aw-firebase-sync");
path.push("config.yaml");
path
})
.unwrap();

if !path.exists() {
DirBuilder::new()
.recursive(true)
.create(path.as_path().parent().expect("Unable to get parent path"))
.expect("Unable to create config directory");
let mut file = File::create(&path).expect("Unable to create file");
file.write_all(b"apikey: your-api-key\n")
.expect("Unable to write to file");
panic!("Please set your API key at {:?}", path.to_str().unwrap());
}

let mut file = File::open(path).expect("Unable to open file");
let mut contents = String::new();
file.read_to_string(&mut contents)
.expect("Unable to read file");
let yaml: Value =
serde_yaml::from_str(&contents).expect("Failed parsing yaml from config file");
let apikey = yaml["apikey"]
.as_str()
.expect("unable to get api key from config file");
if apikey == "your-api-key" || apikey == "" {
panic!("Please set your API key in the config.yaml file");
}


let query = "
events = flood(query_bucket(find_bucket(\"aw-watcher-window_\")));
not_afk = flood(query_bucket(find_bucket(\"aw-watcher-afk_\")));
not_afk = filter_keyvals(not_afk, \"status\", [\"not-afk\"]);
events = filter_period_intersect(events, not_afk);
events = categorize(events, [[[\"Work\"], {\"type\": \"regex\", \"regex\": \"aw|ActivityWatch\", \"ignore_case\": true}]]);
events = filter_keyvals(events, \"$category\", [[\"Work\"]]);
RETURN = events;
";

let firebase_url = if testing {
"http://localhost:5001/aw-mockup/us-central1/uploadData"
} else {
"https://us-central1-aw-mockup.cloudfunctions.net/uploadData"
};

let firebase_client = reqwest::Client::new();

loop {
let start = Local::now().to_utc() - chrono::Duration::minutes(5);
let end = Local::now().to_utc();
let timeperiods = vec![(start, end)];

let query_result = aw_client
.query(&query, timeperiods)
.await
.expect("Failed to query data");
let query_data =
serde_json::to_string(&query_result[0]).expect("Failed to serialize query data");
let payload = json!({
"apiKey": apikey,
"data": query_data
});
let response = firebase_client
.post(firebase_url)
.json(&payload)
.send()
.await?
.json::<Value>()
.await?;
info!("Response: {:?}", response);
std::thread::sleep(std::time::Duration::from_secs(300));
}
// Ok(())
}
Empty file added: aw-firebase-sync/test-sync.sh