move protobuf code to cedar-policy (#1452)
Signed-off-by: Craig Disselkoen <cdiss@amazon.com>
cdisselkoen authored Feb 11, 2025
1 parent b2f570f commit 3a0d233
Showing 43 changed files with 2,683 additions and 2,677 deletions.
5 changes: 1 addition & 4 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 0 additions & 6 deletions cedar-policy-core/Cargo.toml
@@ -41,9 +41,6 @@ wasm-bindgen = { version = "0.2.97", optional = true }
# datetime extension requires chrono
chrono = { version = "0.4.38", optional = true, default-features = false}

# protobuf dependency
prost = { version = "0.13", optional = true }

[features]
# by default, enable all Cedar extensions
default = ["ipaddr", "decimal"]
@@ -60,12 +57,9 @@ test-util = []
# Experimental features.
partial-eval = []
wasm = ["serde-wasm-bindgen", "tsify", "wasm-bindgen"]
protobufs = ["dep:prost", "dep:prost-build"]

[build-dependencies]
lalrpop = "0.22.1"
# protobuf dependency
prost-build = { version = "0.13", optional = true }

[dev-dependencies]
cool_asserts = "2.0"
11 changes: 0 additions & 11 deletions cedar-policy-core/build.rs
@@ -16,8 +16,6 @@

fn main() {
generate_parsers();
#[cfg(feature = "protobufs")]
generate_schemas();
}

/// Reads parser grammar files (.lalrpop) and generates Rust modules
@@ -28,12 +26,3 @@ fn generate_parsers() {
.process_dir("src/parser/")
.expect("parser synth");
}

#[cfg(feature = "protobufs")]
/// Reads protobuf schema files (.proto) and generates Rust modules
fn generate_schemas() {
// PANIC SAFETY: static file compiled at build time
#[allow(clippy::expect_used)]
prost_build::compile_protos(&["./protobuf_schema/AST.proto"], &["./protobuf_schema"])
.expect("failed to compile `.proto` schema files");
}
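
The Cargo.toml and build.rs hunks above drop the feature-gated protobuf tooling from cedar-policy-core: the optional `prost` and `prost-build` dependencies, the `protobufs` feature that enabled them, and the build step that compiled the `.proto` schemas. Per the commit title, this wiring moves to the cedar-policy crate; a minimal sketch of the build-script side of that pattern (the schema paths are copied from the removed lines and are otherwise an assumption about the destination crate):

fn main() {
    // Generate Rust bindings from the protobuf schemas only when the
    // `protobufs` feature is enabled; otherwise this step is skipped.
    #[cfg(feature = "protobufs")]
    {
        // PANIC SAFETY: static schema files compiled at build time
        #[allow(clippy::expect_used)]
        prost_build::compile_protos(&["./protobuf_schema/AST.proto"], &["./protobuf_schema"])
            .expect("failed to compile `.proto` schema files");
    }
}

The generated module is then pulled in with `include!(concat!(env!("OUT_DIR"), ...))`, exactly as the ast.rs hunk below shows.
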
6 changes: 0 additions & 6 deletions cedar-policy-core/src/ast.rs
@@ -16,12 +16,6 @@

//! This module contains the AST datatypes.
#[cfg(feature = "protobufs")]
pub mod proto {
#![allow(missing_docs)]
include!(concat!(env!("OUT_DIR"), "/cedar_policy_core.rs"));
}

mod expr;
pub use expr::*;
mod entity;
19 changes: 0 additions & 19 deletions cedar-policy-core/src/ast/annotation.rs
@@ -148,25 +148,6 @@ impl AsRef<str> for Annotation {
}
}

#[cfg(feature = "protobufs")]
impl From<&crate::ast::proto::Annotation> for Annotation {
fn from(v: &crate::ast::proto::Annotation) -> Self {
Self {
val: v.val.clone().into(),
loc: None,
}
}
}

#[cfg(feature = "protobufs")]
impl From<&Annotation> for crate::ast::proto::Annotation {
fn from(v: &Annotation) -> Self {
Self {
val: v.val.to_string(),
}
}
}
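
// Illustration only (not part of this commit): the two impls removed above
// convert an `Annotation` to and from its prost-generated counterpart.
// Round-tripping keeps the value but drops the source location, since the
// conversion back sets `loc: None`. A hypothetical in-crate test:
#[cfg(feature = "protobufs")]
#[test]
fn annotation_proto_round_trip_sketch() {
    // Struct literal is fine here because the fields are visible in-crate.
    let ann = Annotation {
        val: "docs".into(),
        loc: None,
    };
    let back = Annotation::from(&crate::ast::proto::Annotation::from(&ann));
    assert_eq!(back.val, ann.val);
    assert!(back.loc.is_none());
}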

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Annotation {
fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
186 changes: 11 additions & 175 deletions cedar-policy-core/src/ast/entity.rs
@@ -100,28 +100,6 @@ impl FromStr for EntityType {
}
}

#[cfg(feature = "protobufs")]
impl From<&proto::EntityType> for EntityType {
// PANIC SAFETY: experimental feature
#[allow(clippy::expect_used)]
fn from(v: &proto::EntityType) -> Self {
Self(Name::from(
v.name
.as_ref()
.expect("`as_ref()` for field that should exist"),
))
}
}

#[cfg(feature = "protobufs")]
impl From<&EntityType> for proto::EntityType {
fn from(v: &EntityType) -> Self {
Self {
name: Some(proto::Name::from(v.name())),
}
}
}

impl std::fmt::Display for EntityType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
@@ -255,33 +233,6 @@ impl<'a> arbitrary::Arbitrary<'a> for EntityUID {
}
}

#[cfg(feature = "protobufs")]
impl From<&proto::EntityUid> for EntityUID {
// PANIC SAFETY: experimental feature
#[allow(clippy::expect_used)]
fn from(v: &proto::EntityUid) -> Self {
Self {
ty: EntityType::from(
v.ty.as_ref()
.expect("`as_ref()` for field that should exist"),
),
eid: Eid::new(v.eid.clone()),
loc: None,
}
}
}

#[cfg(feature = "protobufs")]
impl From<&EntityUID> for proto::EntityUid {
fn from(v: &EntityUID) -> Self {
let eid_ref: &str = v.eid.as_ref();
Self {
ty: Some(proto::EntityType::from(&v.ty)),
eid: eid_ref.to_owned(),
}
}
}
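
// Illustration only (not part of this commit): the removed `EntityType` and
// `EntityUid` conversions are exercised by the `round_trip_protobuf` test
// near the end of this file (also removed below). A minimal usage sketch
// built from calls that appear elsewhere in this diff:
#[cfg(feature = "protobufs")]
#[test]
fn entity_uid_proto_round_trip_sketch() {
    let euid: EntityUID = r#"User::"alice""#.parse().expect("valid entity UID");
    let wire = proto::EntityUid::from(&euid);
    assert_eq!(euid, EntityUID::from(&wire));
}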

/// The `Eid` type represents the id of an `Entity`, without the typename.
/// Together with the typename it comprises an `EntityUID`.
/// For example, in `User::"alice"`, the `Eid` is `alice`.
@@ -332,7 +283,7 @@ pub struct Entity {
/// UID
uid: EntityUID,

/// Internal BTreMap of attributes.
/// Internal BTreeMap of attributes.
/// We use a btreemap so that the keys have a deterministic order.
///
/// In the serialized form of `Entity`, attribute values appear as
@@ -399,10 +350,10 @@ impl Entity {
})
}

/// Create a new `Entity` with this UID, attributes, and ancestors (and no tags)
/// Create a new `Entity` with this UID, attributes, ancestors, and tags
///
/// Unlike in `Entity::new()`, in this constructor, attributes are expressed
/// as `PartialValue`.
/// Unlike in `Entity::new()`, in this constructor, attributes and tags are
/// expressed as `PartialValue`.
///
/// Callers should consider directly using [`Entity::new_with_attr_partial_value_serialized_as_expr`]
/// if they would call this method by first building a map, as it will
@@ -411,28 +362,31 @@ impl Entity {
uid: EntityUID,
attrs: impl IntoIterator<Item = (SmolStr, PartialValue)>,
ancestors: HashSet<EntityUID>,
tags: impl IntoIterator<Item = (SmolStr, PartialValue)>,
) -> Self {
Self::new_with_attr_partial_value_serialized_as_expr(
uid,
attrs.into_iter().map(|(k, v)| (k, v.into())).collect(),
ancestors,
tags.into_iter().map(|(k, v)| (k, v.into())).collect(),
)
}

/// Create a new `Entity` with this UID, attributes, and ancestors (and no tags)
/// Create a new `Entity` with this UID, attributes, ancestors, and tags
///
/// Unlike in `Entity::new()`, in this constructor, attributes are expressed
/// as `PartialValueSerializedAsExpr`.
/// Unlike in `Entity::new()`, in this constructor, attributes and tags are
/// expressed as `PartialValueSerializedAsExpr`.
pub fn new_with_attr_partial_value_serialized_as_expr(
uid: EntityUID,
attrs: BTreeMap<SmolStr, PartialValueSerializedAsExpr>,
ancestors: HashSet<EntityUID>,
tags: BTreeMap<SmolStr, PartialValueSerializedAsExpr>,
) -> Self {
Entity {
uid,
attrs,
ancestors,
tags: BTreeMap::new(),
tags,
}
}
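
// Illustration only (not part of this commit): the visible change in this
// hunk is that both constructors now take tags explicitly instead of
// silently substituting an empty map. A hedged sketch of calling the updated
// constructor (`new_with_attr_partial_values` in cedar-policy-core); it
// reuses the evaluator pattern from the protobuf conversions elsewhere in
// this diff, assumes this module's existing imports, and assumes the usual
// `Expr::val` literal constructor, which is not itself part of this diff.
#[allow(clippy::expect_used)] // PANIC SAFETY: sketch only
fn entity_with_tags_sketch() -> Entity {
    let eval = RestrictedEvaluator::new(Extensions::none());
    let pv = |e: &Expr| -> PartialValue {
        eval.partial_interpret(BorrowedRestrictedExpr::new(e).expect("restricted expr"))
            .expect("evaluation succeeds")
    };
    Entity::new_with_attr_partial_values(
        r#"User::"alice""#.parse().expect("valid entity UID"),
        [("is_admin".into(), pv(&Expr::val(false)))], // attributes
        HashSet::new(),                               // ancestors
        [("verified".into(), pv(&Expr::val(true)))],  // tags: now an explicit argument
    )
}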

@@ -660,94 +614,6 @@ impl std::fmt::Display for Entity {
}
}

#[cfg(feature = "protobufs")]
impl From<&proto::Entity> for Entity {
// PANIC SAFETY: experimental feature
#[allow(clippy::expect_used)]
fn from(v: &proto::Entity) -> Self {
let eval = RestrictedEvaluator::new(Extensions::none());

let attrs: BTreeMap<SmolStr, PartialValueSerializedAsExpr> = v
.attrs
.iter()
.map(|(key, value)| {
let pval = eval
.partial_interpret(
BorrowedRestrictedExpr::new(&Expr::from(value)).expect("RestrictedExpr"),
)
.expect("interpret on RestrictedExpr");
(key.into(), pval.into())
})
.collect();

let ancestors: HashSet<EntityUID> = v.ancestors.iter().map(EntityUID::from).collect();

let tags: BTreeMap<SmolStr, PartialValueSerializedAsExpr> = v
.tags
.iter()
.map(|(key, value)| {
let pval = eval
.partial_interpret(
BorrowedRestrictedExpr::new(&Expr::from(value)).expect("RestrictedExpr"),
)
.expect("interpret on RestrictedExpr");
(key.into(), pval.into())
})
.collect();

Self {
uid: EntityUID::from(
v.uid
.as_ref()
.expect("`as_ref()` for field that should exist"),
),
attrs,
ancestors,
tags,
}
}
}

#[cfg(feature = "protobufs")]
impl From<&Entity> for proto::Entity {
fn from(v: &Entity) -> Self {
let mut attrs: HashMap<String, proto::Expr> = HashMap::with_capacity(v.attrs.len());
for (key, value) in &v.attrs {
attrs.insert(
key.to_string(),
proto::Expr::from(&Expr::from(PartialValue::from(value.to_owned()))),
);
}

let mut ancestors: Vec<proto::EntityUid> = Vec::with_capacity(v.ancestors.len());
for ancestor in &v.ancestors {
ancestors.push(proto::EntityUid::from(ancestor));
}

let mut tags: HashMap<String, proto::Expr> = HashMap::with_capacity(v.tags.len());
for (key, value) in &v.tags {
tags.insert(
key.to_string(),
proto::Expr::from(&Expr::from(PartialValue::from(value.to_owned()))),
);
}

Self {
uid: Some(proto::EntityUid::from(&v.uid)),
attrs,
ancestors,
tags,
}
}
}

#[cfg(feature = "protobufs")]
impl From<&Arc<Entity>> for proto::Entity {
fn from(v: &Arc<Entity>) -> Self {
Self::from(v.as_ref())
}
}
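
// Illustration only (not part of this commit): the impls removed above only
// translate between the in-memory `Entity` and the prost-generated
// `proto::Entity`; actual wire bytes then come from the standard
// `prost::Message` API. A hedged sketch:
use prost::Message;

fn entity_to_protobuf_bytes(e: &Entity) -> Vec<u8> {
    proto::Entity::from(e).encode_to_vec()
}

fn entity_from_protobuf_bytes(bytes: &[u8]) -> Entity {
    // PANIC SAFETY: sketch only; real code should propagate the DecodeError
    #[allow(clippy::expect_used)]
    let wire = proto::Entity::decode(bytes).expect("valid protobuf bytes");
    Entity::from(&wire)
}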

/// `PartialValue`, but serialized as a `RestrictedExpr`.
///
/// (Extension values can't be directly serialized, but can be serialized as
@@ -861,36 +727,6 @@ mod test {
assert!(!euid.is_action());
}

#[cfg(feature = "protobufs")]
#[test]
fn round_trip_protobuf() {
let name = Name::from_normalized_str("B::C::D").unwrap();
let ety_specified = EntityType(name);
assert_eq!(
ety_specified,
EntityType::from(&proto::EntityType::from(&ety_specified))
);

let euid1 = EntityUID::with_eid("foo");
assert_eq!(euid1, EntityUID::from(&proto::EntityUid::from(&euid1)));

let euid2 = EntityUID::from_str("Foo::Action::\"view\"").unwrap();
assert_eq!(euid2, EntityUID::from(&proto::EntityUid::from(&euid2)));

let attrs = (1..=7)
.map(|id| (format!("{id}").into(), RestrictedExpr::val(true)))
.collect::<HashMap<SmolStr, _>>();
let entity = Entity::new(
r#"Foo::"bar""#.parse().unwrap(),
attrs,
HashSet::new(),
BTreeMap::new(),
Extensions::none(),
)
.unwrap();
assert_eq!(entity, Entity::from(&proto::Entity::from(&entity)));
}

#[test]
fn action_type_is_valid_id() {
assert!(Id::from_normalized_str(ACTION_ENTITY_TYPE).is_ok());
(Diffs for the remaining changed files are not shown.)
