Merge pull request #20 from probably-nothing-labs/update-version
change package version and update name
emgeee authored Aug 15, 2024
2 parents 5127caf + e774b20 commit 8a73819
Showing 8 changed files with 23 additions and 23 deletions.
10 changes: 5 additions & 5 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions Cargo.toml
@@ -11,12 +11,12 @@ edition = "2021"
homepage = "https://github.com/probably-nothing-labs/denormalized"
license = "Apache-2.0"
readme = "README.md"
repository = "https://github.com/probably-nothing-labs/denormalized"
version = "0.1.0"
repository = "https://github.com/probably-nothing-labs/denormalized.git"
version = "0.0.1"
description = "Embeddable stream processing engine"

[workspace.dependencies]
- df-streams-core = { path = "crates/core" }
+ denormalized = { path = "crates/core" }
datafusion = "41.0.0"

arrow = { version = "52.0.0", features = ["prettyprint"] }
2 changes: 1 addition & 1 deletion crates/core/Cargo.toml
@@ -1,5 +1,5 @@
[package]
name = "df-streams-core"
name = "denormalized"
version = { workspace = true }
edition = { workspace = true }

4 changes: 2 additions & 2 deletions examples/Cargo.toml
@@ -1,13 +1,13 @@
[package]
name = "df-streams-examples"
name = "denormalized-examples"
version = { workspace = true }
edition = { workspace = true }
publish = false

[dependencies]
datafusion = { workspace = true }

- df-streams-core = { workspace = true }
+ denormalized = { workspace = true }

arrow = { workspace = true }
arrow-schema = { workspace = true }
2 changes: 1 addition & 1 deletion examples/examples/emit_measurements.rs
@@ -7,7 +7,7 @@ use rdkafka::config::ClientConfig;
use rdkafka::producer::FutureRecord;
use rdkafka::util::Timeout;

- use df_streams_examples::Measurment;
+ use denormalized_examples::Measurment;

/// This script emits test data to a kafka cluster
///
6 changes: 3 additions & 3 deletions examples/examples/kafka_rideshare.rs
@@ -4,9 +4,9 @@ use datafusion::functions_aggregate::count::count;
use datafusion::functions_aggregate::expr_fn::{max, min};
use datafusion::logical_expr::col;

- use df_streams_core::context::Context;
- use df_streams_core::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
- use df_streams_core::physical_plan::utils::time::TimestampUnit;
+ use denormalized::context::Context;
+ use denormalized::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
+ use denormalized::physical_plan::utils::time::TimestampUnit;

use std::time::Duration;
use tracing_subscriber::{fmt::format::FmtSpan, FmtSubscriber};
8 changes: 4 additions & 4 deletions examples/examples/simple_aggregation.rs
@@ -6,11 +6,11 @@ use datafusion::functions_aggregate::count::count;
use datafusion::functions_aggregate::expr_fn::{max, min};
use datafusion::logical_expr::{col, lit};

- use df_streams_core::context::Context;
- use df_streams_core::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
- use df_streams_core::physical_plan::utils::time::TimestampUnit;
+ use denormalized::context::Context;
+ use denormalized::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
+ use denormalized::physical_plan::utils::time::TimestampUnit;

- use df_streams_examples::get_sample_json;
+ use denormalized_examples::get_sample_json;

/// Demonstrates a simple stream aggregate job on data generated via the `emit_measurements.rs`
/// example script.
8 changes: 4 additions & 4 deletions examples/examples/stream_join.rs
@@ -5,11 +5,11 @@ use datafusion::error::Result;
use datafusion::functions_aggregate::average::avg;
use datafusion::logical_expr::col;

- use df_streams_core::context::Context;
- use df_streams_core::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
- use df_streams_core::physical_plan::utils::time::TimestampUnit;
+ use denormalized::context::Context;
+ use denormalized::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
+ use denormalized::physical_plan::utils::time::TimestampUnit;

- use df_streams_examples::get_sample_json;
+ use denormalized_examples::get_sample_json;

/// Demonstrates a simple stream join on data generated via the `emit_measurements.rs`
/// example script.
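For downstream users, the net effect of this rename is mechanical: every df_streams_core::... import path becomes denormalized::..., and the shared example helpers move from df_streams_examples to denormalized_examples. Below is a minimal sketch of the updated imports, using only the module paths that appear in the diffs above; the function body is illustrative only and assumes denormalized = "0.0.1" (or a path dependency on crates/core) is declared in the consumer's Cargo.toml.

use denormalized::context::Context;
use denormalized::datasource::kafka::{ConnectionOpts, KafkaTopicBuilder};
use denormalized::physical_plan::utils::time::TimestampUnit;

fn main() {
    // Name the imported types to confirm the new crate paths resolve;
    // real streaming usage follows the example programs changed in this PR.
    println!("{}", std::any::type_name::<Context>());
    println!("{}", std::any::type_name::<KafkaTopicBuilder>());
    println!("{}", std::any::type_name::<ConnectionOpts>());
    println!("{}", std::any::type_name::<TimestampUnit>());
}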
