Update to diesel 2 #5002

Merged 1 commit on Mar 20, 2024
78 changes: 58 additions & 20 deletions Cargo.lock

Some generated files are not rendered by default.

5 changes: 5 additions & 0 deletions Cargo.toml
@@ -23,6 +23,11 @@ repository = "https://github.com/graphprotocol/graph-node"
license = "MIT OR Apache-2.0"

[workspace.dependencies]
diesel = { version = "2.1.3", features = ["postgres", "serde_json", "numeric", "r2d2", "chrono"] }
diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }
diesel_derives = "2.1.2"
diesel-dynamic-schema = "0.2.1"
diesel_migrations = "2.1.0"
prost = "0.11.9"
prost-types = "0.11.9"
tonic = { version = "0.8.3", features = ["tls-roots", "gzip"] }
2 changes: 1 addition & 1 deletion chain/arweave/Cargo.toml
@@ -18,4 +18,4 @@ graph-runtime-wasm = { path = "../../runtime/wasm" }
graph-runtime-derive = { path = "../../runtime/derive" }

[dev-dependencies]
diesel = { version = "1.4.7", features = ["postgres", "serde_json", "numeric", "r2d2"] }
diesel = { workspace = true }
2 changes: 1 addition & 1 deletion chain/near/Cargo.toml
@@ -18,5 +18,5 @@ graph-runtime-wasm = { path = "../../runtime/wasm" }
graph-runtime-derive = { path = "../../runtime/derive" }

[dev-dependencies]
diesel = { version = "1.4.7", features = ["postgres", "serde_json", "numeric", "r2d2"] }
diesel = { workspace = true }
trigger-filters.path = "../../substreams/trigger-filters"
17 changes: 7 additions & 10 deletions graph/Cargo.toml
@@ -8,17 +8,14 @@ anyhow = "1.0"
async-trait = "0.1.74"
async-stream = "0.3"
atomic_refcell = "0.1.13"
bigdecimal = { version = "0.1.0", features = ["serde"] }
# Make sure no newer version of the bigdecimal library is used in the diesel dependencies reflected in the Cargo.lock file.
# Automatic regeneration of the Cargo.lock file would break compilation, as only the diesel dependencies would be updated.
# Using a higher version of bigdecimal across the whole graph-node project would change the PoI and hence break consensus.
bigdecimal = { version = "=0.1.2", features = ["serde"] }
bytes = "1.0.1"
cid = "0.11.0"
diesel = { version = "1.4.8", features = [
"postgres",
"serde_json",
"numeric",
"r2d2",
"chrono",
] }
diesel_derives = "1.4"
diesel = { workspace = true }
diesel_derives = { workspace = true }
chrono = "0.4.31"
envconfig = "0.10.0"
Inflector = "0.11.3"
@@ -32,7 +29,7 @@ lru_time_cache = "0.11"
graphql-parser = "0.4.0"
humantime = "2.1.0"
lazy_static = "1.4.0"
num-bigint = { version = "^0.2.6", features = ["serde"] }
num-bigint = { version = "=0.2.6", features = ["serde"] }
num-traits = "=0.2.17"
rand = "0.8.4"
regex = "1.5.4"
18 changes: 8 additions & 10 deletions graph/src/blockchain/types.rs
@@ -1,14 +1,12 @@
use anyhow::anyhow;
use chrono::{DateTime, Utc};
use diesel::deserialize::FromSql;
use diesel::pg::Pg;
use diesel::serialize::Output;
use diesel::serialize::{Output, ToSql};
use diesel::sql_types::Timestamptz;
use diesel::sql_types::{Bytea, Nullable, Text};
use diesel::types::FromSql;
use diesel::types::ToSql;
use diesel_derives::{AsExpression, FromSqlRow};
use diesel_derives::FromSqlRow;
use std::convert::TryFrom;
use std::io::Write;
use std::time::Duration;
use std::{fmt, str::FromStr};
use web3::types::{Block, H256};
@@ -20,7 +18,7 @@ use crate::util::stable_hash_glue::{impl_stable_hash, AsBytes};
use crate::{cheap_clone::CheapClone, components::store::BlockNumber};

/// A simple marker for byte arrays that are really block hashes
#[derive(Clone, Default, PartialEq, Eq, Hash, AsExpression, FromSqlRow)]
#[derive(Clone, Default, PartialEq, Eq, Hash, FromSqlRow)]
pub struct BlockHash(pub Box<[u8]>);

impl_stable_hash!(BlockHash(transparent: AsBytes));
@@ -95,23 +93,23 @@ impl FromStr for BlockHash {
}

impl FromSql<Nullable<Text>, Pg> for BlockHash {
fn from_sql(bytes: Option<&[u8]>) -> diesel::deserialize::Result<Self> {
fn from_sql(bytes: diesel::pg::PgValue) -> diesel::deserialize::Result<Self> {
let s = <String as FromSql<Text, Pg>>::from_sql(bytes)?;
BlockHash::try_from(s.as_str())
.map_err(|e| format!("invalid block hash `{}`: {}", s, e).into())
}
}

impl FromSql<Text, Pg> for BlockHash {
fn from_sql(bytes: Option<&[u8]>) -> diesel::deserialize::Result<Self> {
fn from_sql(bytes: diesel::pg::PgValue) -> diesel::deserialize::Result<Self> {
let s = <String as FromSql<Text, Pg>>::from_sql(bytes)?;
BlockHash::try_from(s.as_str())
.map_err(|e| format!("invalid block hash `{}`: {}", s, e).into())
}
}

impl FromSql<Bytea, Pg> for BlockHash {
fn from_sql(bytes: Option<&[u8]>) -> diesel::deserialize::Result<Self> {
fn from_sql(bytes: diesel::pg::PgValue) -> diesel::deserialize::Result<Self> {
let bytes = <Vec<u8> as FromSql<Bytea, Pg>>::from_sql(bytes)?;
Ok(BlockHash::from(bytes))
}
@@ -396,7 +394,7 @@ impl TryFrom<&Value> for BlockTime {
}

impl ToSql<Timestamptz, Pg> for BlockTime {
fn to_sql<W: Write>(&self, out: &mut Output<W, Pg>) -> diesel::serialize::Result {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> diesel::serialize::Result {
<DateTime<Utc> as ToSql<Timestamptz, Pg>>::to_sql(&self.0, out)
}
}
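ToSql changes in the same spirit: the std::io::Write generic (and the io::Write import) is gone, and Output<'b, '_, Pg> borrows the serialized value for the lifetime 'b of &self. A minimal standalone sketch, assuming diesel 2.1 with the "chrono" feature and a hypothetical Timestamp wrapper:

use chrono::{DateTime, Utc};
use diesel::pg::Pg;
use diesel::serialize::{Output, ToSql};
use diesel::sql_types::Timestamptz;

// Hypothetical wrapper, for illustration only; diesel 2 requires ToSql types to be Debug.
#[derive(Debug)]
pub struct Timestamp(pub DateTime<Utc>);

impl ToSql<Timestamptz, Pg> for Timestamp {
    // diesel 2: Output<'b, '_, Pg> replaces the old Output<W: Write, Pg>.
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> diesel::serialize::Result {
        // Forward to the chrono impl shipped with diesel's "chrono" feature.
        <DateTime<Utc> as ToSql<Timestamptz, Pg>>::to_sql(&self.0, out)
    }
}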
8 changes: 8 additions & 0 deletions graph/src/components/store/err.rs
@@ -48,6 +48,8 @@ pub enum StoreError {
UnknownShard(String),
#[error("Fulltext search not yet deterministic")]
FulltextSearchNonDeterministic,
#[error("Fulltext search column missing configuration")]
FulltextColumnMissingConfig,
#[error("operation was canceled")]
Canceled,
#[error("database unavailable")]
@@ -68,6 +70,8 @@ pub enum StoreError {
UnsupportedDeploymentSchemaVersion(i32),
#[error("pruning failed: {0}")]
PruneFailure(String),
#[error("unsupported filter `{0}` for value `{1}`")]
UnsupportedFilter(String, String),
}

// Convenience to report a constraint violation
@@ -110,6 +114,7 @@ impl Clone for StoreError {
Self::DeploymentNotFound(arg0) => Self::DeploymentNotFound(arg0.clone()),
Self::UnknownShard(arg0) => Self::UnknownShard(arg0.clone()),
Self::FulltextSearchNonDeterministic => Self::FulltextSearchNonDeterministic,
Self::FulltextColumnMissingConfig => Self::FulltextColumnMissingConfig,
Self::Canceled => Self::Canceled,
Self::DatabaseUnavailable => Self::DatabaseUnavailable,
Self::DatabaseDisabled => Self::DatabaseDisabled,
@@ -120,6 +125,9 @@
Self::UnsupportedDeploymentSchemaVersion(arg0.clone())
}
Self::PruneFailure(arg0) => Self::PruneFailure(arg0.clone()),
Self::UnsupportedFilter(arg0, arg1) => {
Self::UnsupportedFilter(arg0.clone(), arg1.clone())
}
}
}
}
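The two new variants, FulltextColumnMissingConfig and UnsupportedFilter, are added both to the enum and to the hand-written Clone impl above (the impl is written out by hand, presumably because some StoreError variants wrap payloads that are not Clone). A hedged sketch of how the new UnsupportedFilter variant might be raised when a filter/value pair cannot be translated to SQL; the function and its operator list are hypothetical:

// Assumes StoreError from graph::components::store is in scope.
fn check_filter(op: &str, value: &str) -> Result<(), StoreError> {
    match op {
        // Illustrative operator whitelist only.
        "equals" | "in" | "contains" => Ok(()),
        other => Err(StoreError::UnsupportedFilter(
            other.to_string(),
            value.to_string(),
        )),
    }
}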
28 changes: 27 additions & 1 deletion graph/src/data/store/id.rs
@@ -1,6 +1,12 @@
//! Types and helpers to deal with entity IDs which support a subset of the
//! types that more general values support
use anyhow::{anyhow, Context, Error};
use diesel::{
pg::Pg,
query_builder::AstPass,
sql_types::{BigInt, Binary, Text},
QueryResult,
};
use stable_hash::{StableHash, StableHasher};
use std::convert::TryFrom;
use std::fmt;
@@ -299,6 +305,14 @@ impl<'a> IdRef<'a> {
IdRef::Int8(_) => IdType::Int8,
}
}

pub fn push_bind_param<'b>(&'b self, out: &mut AstPass<'_, 'b, Pg>) -> QueryResult<()> {
match self {
IdRef::String(s) => out.push_bind_param::<Text, _>(*s),
IdRef::Bytes(b) => out.push_bind_param::<Binary, _>(*b),
IdRef::Int8(i) => out.push_bind_param::<BigInt, _>(i),
}
}
}
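push_bind_param is how a hand-written query fragment attaches bind values in diesel 2; the AstPass<'_, 'b, Pg> borrows every bound value for the lifetime 'b of the fragment, which is why push_bind_param above takes &'b self. A minimal sketch of a fragment using it, assuming diesel 2.1; the IdEq type is hypothetical:

use diesel::pg::Pg;
use diesel::query_builder::{AstPass, QueryFragment};
use diesel::sql_types::Text;
use diesel::QueryResult;

// Hypothetical fragment rendering `<column> = $1` with a text id.
pub struct IdEq<'a> {
    column: &'a str,
    id: &'a str,
}

impl<'a> QueryFragment<Pg> for IdEq<'a> {
    fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> {
        out.push_identifier(self.column)?;
        out.push_sql(" = ");
        // The bound value must live for 'b, i.e. as long as the fragment itself.
        out.push_bind_param::<Text, _>(self.id)
    }
}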

/// A homogeneous list of entity ids, i.e., all ids in the list are of the
@@ -448,14 +462,26 @@ impl IdList {
}
}

pub fn index(&self, index: usize) -> IdRef<'_> {
pub fn index<'b>(&'b self, index: usize) -> IdRef<'b> {
match self {
IdList::String(ids) => IdRef::String(&ids[index]),
IdList::Bytes(ids) => IdRef::Bytes(ids[index].as_slice()),
IdList::Int8(ids) => IdRef::Int8(ids[index]),
}
}

pub fn bind_entry<'b>(
&'b self,
index: usize,
out: &mut AstPass<'_, 'b, Pg>,
) -> QueryResult<()> {
match self {
IdList::String(ids) => out.push_bind_param::<Text, _>(&ids[index]),
IdList::Bytes(ids) => out.push_bind_param::<Binary, _>(ids[index].as_slice()),
IdList::Int8(ids) => out.push_bind_param::<BigInt, _>(&ids[index]),
}
}

pub fn first(&self) -> Option<IdRef<'_>> {
if self.len() > 0 {
Some(self.index(0))
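bind_entry does the same for one element of the homogeneous id list, so query-building code can bind an id without first materializing an IdRef. A hedged sketch of using it to render an IN (...) clause from inside a walk_ast-style helper; the helper itself is hypothetical:

use diesel::pg::Pg;
use diesel::query_builder::AstPass;
use diesel::QueryResult;

// Assumes IdList from graph::data::store::id is in scope.
// Hypothetical helper: renders "IN ($1, $2, ...)" and binds each id in turn.
fn push_in_clause<'b>(ids: &'b IdList, out: &mut AstPass<'_, 'b, Pg>) -> QueryResult<()> {
    out.push_sql(" IN (");
    for i in 0..ids.len() {
        if i > 0 {
            out.push_sql(", ");
        }
        ids.bind_entry(i, out)?;
    }
    out.push_sql(")");
    Ok(())
}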