Skip to content

Commit

Permalink
Merge pull request #491 from supabase/or/release-1-5-0
Browse files Browse the repository at this point in the history
Update version for 1.5.0 release
  • Loading branch information
olirice committed Feb 9, 2024
2 parents 339521a + 8d130ad commit 65b16a4
Show file tree
Hide file tree
Showing 9 changed files with 249 additions and 177 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "pg_graphql"
version = "1.4.4"
version = "1.5.0"
edition = "2021"

[lib]
Expand Down
4 changes: 4 additions & 0 deletions docs/changelog.md
Expand Up @@ -64,4 +64,8 @@
## 1.4.4
- bugfix: function returning a noncompliant view's type no longer breaks introspection

## 1.5.0
- feature: `first`/`offset` based pagination
- feature: improved descriptions for all internal error states

## master
242 changes: 140 additions & 102 deletions src/builder.rs

Large diffs are not rendered by default.

46 changes: 29 additions & 17 deletions src/graphql.rs
Expand Up @@ -133,7 +133,10 @@ impl __Schema {
column_names = &fkey.referenced_table_meta.column_names;
}

let table: &Arc<Table> = self.context.get_table_by_oid(table_ref.oid).unwrap();
let table: &Arc<Table> = self
.context
.get_table_by_oid(table_ref.oid)
.expect("failed to get table by oid");

let is_inflection_on = self.inflect_names(table.schema_oid);

Expand Down Expand Up @@ -1123,6 +1126,15 @@ pub struct FilterTypeType {
pub schema: Arc<__Schema>,
}

impl FilterTypeType {
    /// GraphQL type name of the wrapped filterable entity.
    ///
    /// Panics if the underlying scalar/enum type has no name, which would
    /// indicate a broken schema invariant.
    fn entity_name(&self) -> String {
        match &self.entity {
            FilterableType::Enum(e) => e.name().expect("enum type name should exist"),
            FilterableType::Scalar(s) => s.name().expect("scalar name should exist"),
        }
    }
}

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct FilterEntityType {
pub table: Arc<Table>,
Expand Down Expand Up @@ -1340,7 +1352,11 @@ fn function_args(schema: &Arc<__Schema>, func: &Arc<Function>) -> Vec<__InputVal
if matches!(t.category, TypeCategory::Pseudo) {
None
} else {
Some((t, arg_name.unwrap(), arg_default))
Some((
t,
arg_name.expect("function arg name should exist"),
arg_default,
))
}
}
None => None,
Expand Down Expand Up @@ -1713,7 +1729,7 @@ impl ___Type for NodeInterfaceType {
}

fn possible_types(&self) -> Option<Vec<__Type>> {
let node_interface_name = self.name().unwrap();
let node_interface_name = self.name().expect("node interface type name should exist");

let mut possible_types = vec![];

Expand All @@ -1723,8 +1739,10 @@ impl ___Type for NodeInterfaceType {
.sorted_by(|a, b| a.name().cmp(&b.name()))
{
let type_interfaces: Vec<__Type> = type_.interfaces().unwrap_or(vec![]);
let interface_names: Vec<String> =
type_interfaces.iter().map(|x| x.name().unwrap()).collect();
let interface_names: Vec<String> = type_interfaces
.iter()
.map(|x| x.name().expect("type interface name should exist"))
.collect();
if interface_names.contains(&node_interface_name) {
possible_types.push(type_)
}
Expand Down Expand Up @@ -2076,7 +2094,7 @@ impl ___Type for NodeType {
if foreign_table.is_none() {
continue;
}
let foreign_table = foreign_table.unwrap();
let foreign_table = foreign_table.expect("foreign table should exist");
if !self
.schema
.graphql_table_select_types_are_valid(foreign_table)
Expand Down Expand Up @@ -2125,7 +2143,7 @@ impl ___Type for NodeType {
if foreign_table.is_none() {
continue;
}
let foreign_table = foreign_table.unwrap();
let foreign_table = foreign_table.expect("foreign table should exist");
if !self
.schema
.graphql_table_select_types_are_valid(foreign_table)
Expand Down Expand Up @@ -3354,10 +3372,7 @@ impl ___Type for FilterTypeType {
}

fn name(&self) -> Option<String> {
match &self.entity {
FilterableType::Scalar(s) => Some(format!("{}Filter", s.name().unwrap())),
FilterableType::Enum(e) => Some(format!("{}Filter", e.name().unwrap())),
}
Some(format!("{}Filter", self.entity_name()))
}

fn fields(&self, _include_deprecated: bool) -> Option<Vec<__Field>> {
Expand All @@ -3367,10 +3382,7 @@ impl ___Type for FilterTypeType {
fn description(&self) -> Option<String> {
Some(format!(
"Boolean expression comparing fields on type \"{}\"",
match &self.entity {
FilterableType::Scalar(s) => s.name().unwrap(),
FilterableType::Enum(e) => e.name().unwrap(),
}
self.entity_name()
))
}

Expand Down Expand Up @@ -3853,15 +3865,15 @@ pub struct __Schema {
#[cached(
    type = "SizedCache<String, HashMap<String, __Type>>",
    create = "{ SizedCache::with_size(200) }",
    convert = r#"{ serde_json::ser::to_string(&schema.context.config).expect("schema config should be a string") }"#,
    sync_writes = true
)]
/// Build a name -> type lookup table for the schema.
///
/// Anonymous types (those without a name) are skipped; results are cached
/// keyed on the serialized schema config.
pub fn type_map(schema: &__Schema) -> HashMap<String, __Type> {
    schema
        .types()
        .into_iter()
        // filter_map keeps only named types and pairs each with its name
        .filter_map(|type_| type_.name().map(|name| (name, type_)))
        .collect()
}
Expand Down
3 changes: 2 additions & 1 deletion src/lib.rs
Expand Up @@ -62,7 +62,8 @@ fn resolve(
}
};

let value: serde_json::Value = serde_json::to_value(response).unwrap();
let value: serde_json::Value =
serde_json::to_value(response).expect("failed to convert response into json");

pgrx::JsonB(value)
}
Expand Down
84 changes: 43 additions & 41 deletions src/parser_util.rs
Expand Up @@ -577,54 +577,56 @@ pub fn validate_arg_from_type(type_: &__Type, value: &gson::Value) -> Result<gso
Scalar::Opaque => value.clone(),
}
}
__Type::Enum(enum_) => match value {
GsonValue::Absent => value.clone(),
GsonValue::Null => value.clone(),
GsonValue::String(user_input_string) => {
let matches_enum_value = enum_
.enum_values(true)
.into_iter()
.flatten()
.find(|x| x.name().as_str() == user_input_string);
match matches_enum_value {
Some(_) => {
match &enum_.enum_ {
EnumSource::Enum(e) => e
.directives
.mappings
.as_ref()
// Use mappings if available and mapped
.and_then(|mappings| mappings.get_by_right(user_input_string))
.map(|val| GsonValue::String(val.clone()))
.unwrap_or_else(|| value.clone()),
EnumSource::FilterIs => value.clone(),
__Type::Enum(enum_) => {
let enum_name = enum_.name().expect("enum type should have a name");
match value {
GsonValue::Absent => value.clone(),
GsonValue::Null => value.clone(),
GsonValue::String(user_input_string) => {
let matches_enum_value = enum_
.enum_values(true)
.into_iter()
.flatten()
.find(|x| x.name().as_str() == user_input_string);
match matches_enum_value {
Some(_) => {
match &enum_.enum_ {
EnumSource::Enum(e) => e
.directives
.mappings
.as_ref()
// Use mappings if available and mapped
.and_then(|mappings| mappings.get_by_right(user_input_string))
.map(|val| GsonValue::String(val.clone()))
.unwrap_or_else(|| value.clone()),
EnumSource::FilterIs => value.clone(),
}
}
}
None => {
return Err(format!("Invalid input for {} type", enum_.name().unwrap()))
None => return Err(format!("Invalid input for {} type", enum_name)),
}
}
_ => return Err(format!("Invalid input for {} type", enum_name)),
}
_ => return Err(format!("Invalid input for {} type", enum_.name().unwrap())),
},
__Type::OrderBy(enum_) => match value {
GsonValue::Absent => value.clone(),
GsonValue::Null => value.clone(),
GsonValue::String(user_input_string) => {
let matches_enum_value = enum_
.enum_values(true)
.into_iter()
.flatten()
.find(|x| x.name().as_str() == user_input_string);
match matches_enum_value {
Some(_) => value.clone(),
None => {
return Err(format!("Invalid input for {} type", enum_.name().unwrap()))
}
__Type::OrderBy(enum_) => {
let enum_name = enum_.name().expect("order by type should have a name");
match value {
GsonValue::Absent => value.clone(),
GsonValue::Null => value.clone(),
GsonValue::String(user_input_string) => {
let matches_enum_value = enum_
.enum_values(true)
.into_iter()
.flatten()
.find(|x| x.name().as_str() == user_input_string);
match matches_enum_value {
Some(_) => value.clone(),
None => return Err(format!("Invalid input for {} type", enum_name)),
}
}
_ => return Err(format!("Invalid input for {} type", enum_name)),
}
_ => return Err(format!("Invalid input for {} type", enum_.name().unwrap())),
},
}
__Type::List(list_type) => {
let inner_type: __Type = *list_type.type_.clone();
match value {
Expand Down
14 changes: 8 additions & 6 deletions src/resolve.rs
Expand Up @@ -157,10 +157,11 @@ where
let query_type = schema_type.query_type();
let map = field_map(&query_type);

let query_type_name = query_type.name().expect("query type should have a name");
let selections = match normalize_selection_set(
&selection_set,
&fragment_definitions,
&query_type.name().unwrap(),
&query_type_name,
variables,
&query_type,
) {
Expand Down Expand Up @@ -196,8 +197,7 @@ where
res_errors.push(ErrorMessage {
message: format!(
"Unknown field {:?} on type {}",
selection.name,
query_type.name().unwrap()
selection.name, query_type_name
),
});
}
Expand Down Expand Up @@ -376,10 +376,13 @@ where

let map = field_map(&mutation_type);

let mutation_type_name = mutation_type
.name()
.expect("mutation type should have a name");
let selections = match normalize_selection_set(
&selection_set,
&fragment_definitions,
&mutation_type.name().unwrap(),
&mutation_type_name,
variables,
&mutation_type,
) {
Expand Down Expand Up @@ -409,8 +412,7 @@ where
conn = match maybe_field_def {
None => Err(format!(
"Unknown field {:?} on type {}",
selection.name,
mutation_type.name().unwrap()
selection.name, mutation_type_name
))?,
Some(field_def) => match field_def.type_.unmodified_type() {
__Type::InsertResponse(_) => {
Expand Down
13 changes: 10 additions & 3 deletions src/sql_types.rs
Expand Up @@ -756,8 +756,12 @@ pub(crate) fn get_one_readonly<A: FromDatum + IntoDatum>(

/// Load the extension configuration by running `load_sql_config.sql`.
///
/// # Panics
/// Panics if the query fails, returns no row, or the returned JSON does not
/// deserialize into `Config`.
pub fn load_sql_config() -> Config {
    let query = include_str!("../sql/load_sql_config.sql");
    let jsonb = get_one_readonly::<JsonB>(query)
        .expect("failed to read sql config")
        .expect("sql config is missing");
    serde_json::from_value(jsonb.0).expect("failed to convert sql config into json")
}

Expand All @@ -776,7 +780,10 @@ pub fn calculate_hash<T: Hash>(t: &T) -> u64 {
pub fn load_sql_context(_config: &Config) -> Result<Arc<Context>, String> {
// cache value for next query
let query = include_str!("../sql/load_sql_context.sql");
let sql_result: serde_json::Value = get_one_readonly::<JsonB>(query).unwrap().unwrap().0;
let sql_result: serde_json::Value = get_one_readonly::<JsonB>(query)
.expect("failed to read sql context")
.expect("sql context is missing")
.0;
let context: Result<Context, serde_json::Error> = serde_json::from_value(sql_result);

/// This pass cross-reference types with its details
Expand Down
18 changes: 12 additions & 6 deletions src/transpile.rs
Expand Up @@ -12,11 +12,17 @@ use std::collections::HashSet;
use std::sync::Arc;

/// Quote `ident` as a SQL identifier via Postgres' built-in `quote_ident`.
///
/// # Panics
/// Panics if the function call yields no value.
pub fn quote_ident(ident: &str) -> String {
    // SAFETY: delegates to a stable Postgres built-in through pgrx's
    // direct_function_call. NOTE(review): presumably this is only called from
    // within a live backend/transaction context, as pgrx requires — confirm.
    unsafe {
        direct_function_call::<String>(pg_sys::quote_ident, &[ident.into_datum()])
            .expect("failed to quote ident")
    }
}

/// Quote `ident` as a SQL string literal via Postgres' built-in `quote_literal`.
///
/// # Panics
/// Panics if the function call yields no value.
pub fn quote_literal(ident: &str) -> String {
    // SAFETY: delegates to a stable Postgres built-in through pgrx's
    // direct_function_call. NOTE(review): presumably only invoked inside a
    // backend/transaction context, as pgrx requires — confirm.
    unsafe {
        direct_function_call::<String>(pg_sys::quote_literal, &[ident.into_datum()])
            .expect("failed to quote literal")
    }
}

pub fn rand_block_name() -> String {
Expand Down Expand Up @@ -1305,10 +1311,10 @@ impl QueryEntrypoint for NodeBuilder {
let quoted_table = quote_ident(&self.table.name);
let object_clause = self.to_sql(&quoted_block_name, param_context)?;

if self.node_id.is_none() {
return Err("Expected nodeId argument missing".to_string());
}
let node_id = self.node_id.as_ref().unwrap();
let node_id = self
.node_id
.as_ref()
.ok_or("Expected nodeId argument missing")?;

let node_id_clause = node_id.to_sql(&quoted_block_name, &self.table, param_context)?;

Expand Down

0 comments on commit 65b16a4

Please sign in to comment.