Skip to content

Commit 39b8db4

Browse files
committed
Allow to deepcopy a pickled schema (#173)
1 parent 9aca33c commit 39b8db4

File tree

3 files changed

+101
-49
lines changed

3 files changed

+101
-49
lines changed

src/graphql/type/schema.py

+16-6
Original file line numberDiff line numberDiff line change
@@ -208,8 +208,8 @@ def __init__(
208208
# Provide specified directives (e.g. @include and @skip) by default
209209
self.directives = specified_directives if directives is None else directives
210210

211-
# To preserve order of user-provided types, we add first to add them to
212-
# the set of "collected" types, so `collect_referenced_types` ignore them.
211+
# To preserve order of user-provided types, we first add them to the set
212+
# of "collected" types, so `collect_referenced_types` ignores them.
213213
if types:
214214
all_referenced_types = TypeSet.with_initial_types(types)
215215
collect_referenced_types = all_referenced_types.collect_referenced_types
@@ -258,10 +258,20 @@ def __init__(
258258
" is missing a name.",
259259
)
260260
if type_name in type_map:
261-
raise TypeError(
262-
"Schema must contain uniquely named types"
263-
f" but contains multiple types named '{type_name}'."
264-
)
261+
from ..type import specified_scalar_types
262+
263+
if (
264+
type_name in specified_scalar_types
265+
and type_map[type_name] is not specified_scalar_types[type_name]
266+
):
267+
# allow replacing a copy of a specified scalar type
268+
named_type = specified_scalar_types[type_name]
269+
else:
270+
raise TypeError(
271+
"Schema must contain uniquely named types"
272+
f" but contains multiple types named '{type_name}'."
273+
)
274+
265275
type_map[type_name] = named_type
266276

267277
if is_interface_type(named_type):

tests/language/test_schema_parser.py

+35-16
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import pickle
2+
from copy import deepcopy
13
from textwrap import dedent
24
from typing import List, Optional, Tuple
35

@@ -797,19 +799,36 @@ def directive_with_incorrect_locations():
797799
def parses_kitchen_sink_schema(kitchen_sink_sdl): # noqa: F811
798800
assert parse(kitchen_sink_sdl)
799801

800-
def can_pickle_and_unpickle_kitchen_sink_schema_ast(kitchen_sink_sdl): # noqa: F811
801-
import pickle
802-
803-
# create a schema AST from the kitchen sink SDL
804-
doc = parse(kitchen_sink_sdl)
805-
# check that the schema AST can be pickled
806-
# (particularly, there should be no recursion error)
807-
dumped = pickle.dumps(doc)
808-
# check that the pickle size is reasonable
809-
assert len(dumped) < 50 * len(kitchen_sink_sdl)
810-
loaded = pickle.loads(dumped)
811-
# check that the un-pickled schema AST is still the same
812-
assert loaded == doc
813-
# check that pickling again creates the same result
814-
dumped_again = pickle.dumps(doc)
815-
assert dumped_again == dumped
802+
def describe_deepcopy_and_pickle():
803+
def can_deep_copy_ast(kitchen_sink_sdl): # noqa: F811
804+
# create a schema AST from the kitchen sink SDL
805+
doc = parse(kitchen_sink_sdl)
806+
# make a deepcopy of the schema AST
807+
copied_doc = deepcopy(doc)
808+
# check that the copied AST is equal to the original one
809+
assert copied_doc == doc
810+
811+
def can_pickle_and_unpickle_ast(kitchen_sink_sdl): # noqa: F811
812+
# create a schema AST from the kitchen sink SDL
813+
doc = parse(kitchen_sink_sdl)
814+
# check that the schema AST can be pickled
815+
# (particularly, there should be no recursion error)
816+
dumped = pickle.dumps(doc)
817+
# check that the pickle size is reasonable
818+
assert len(dumped) < 50 * len(kitchen_sink_sdl)
819+
loaded = pickle.loads(dumped)
820+
# check that the un-pickled schema AST is still the same
821+
assert loaded == doc
822+
# check that pickling again creates the same result
823+
dumped_again = pickle.dumps(doc)
824+
assert dumped_again == dumped
825+
826+
def can_deep_copy_pickled_ast(kitchen_sink_sdl): # noqa: F811
827+
# create a schema AST from the kitchen sink SDL
828+
doc = parse(kitchen_sink_sdl)
829+
# pickle and unpickle the schema AST
830+
loaded_doc = pickle.loads(pickle.dumps(doc))
831+
# make a deepcopy of the unpickled schema AST
832+
copied_doc = deepcopy(loaded_doc)
833+
# check that the result is still equal to the original schema AST
834+
assert copied_doc == doc

tests/utilities/test_build_ast_schema.py

+50-27
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
1+
import pickle
12
from collections import namedtuple
3+
from copy import deepcopy
24
from typing import Union
35

46
from pytest import raises
@@ -1186,30 +1188,51 @@ def rejects_invalid_ast():
11861188
build_ast_schema({}) # type: ignore
11871189
assert str(exc_info.value) == "Must provide valid Document AST."
11881190

1189-
def can_pickle_and_unpickle_big_schema(
1190-
big_schema_sdl, # noqa: F811
1191-
): # pragma: no cover
1192-
import pickle
1193-
1194-
# use our printing conventions
1195-
big_schema_sdl = cycle_sdl(big_schema_sdl)
1196-
1197-
# create a schema from the kitchen sink SDL
1198-
schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
1199-
# check that the schema can be pickled
1200-
# (particularly, there should be no recursion error,
1201-
# or errors because of trying to pickle lambdas or local functions)
1202-
dumped = pickle.dumps(schema)
1203-
1204-
# check that the pickle size is reasonable
1205-
assert len(dumped) < 25 * len(big_schema_sdl)
1206-
loaded = pickle.loads(dumped)
1207-
1208-
# check that printing the unpickled schema gives the same SDL
1209-
assert print_schema(loaded) == big_schema_sdl
1210-
1211-
# check that pickling again creates the same result
1212-
dumped = pickle.dumps(schema)
1213-
assert len(dumped) < 25 * len(big_schema_sdl)
1214-
loaded = pickle.loads(dumped)
1215-
assert print_schema(loaded) == big_schema_sdl
1191+
def describe_deepcopy_and_pickle():
1192+
def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811
1193+
# use our printing conventions
1194+
big_schema_sdl = cycle_sdl(big_schema_sdl)
1195+
1196+
# create a schema from the kitchen sink SDL
1197+
schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
1198+
# create a deepcopy of the schema
1199+
copied = deepcopy(schema)
1200+
# check that printing the copied schema gives the same SDL
1201+
assert print_schema(copied) == big_schema_sdl
1202+
1203+
def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811
1204+
# use our printing conventions
1205+
big_schema_sdl = cycle_sdl(big_schema_sdl)
1206+
1207+
# create a schema from the kitchen sink SDL
1208+
schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
1209+
# check that the schema can be pickled
1210+
# (particularly, there should be no recursion error,
1211+
# or errors because of trying to pickle lambdas or local functions)
1212+
dumped = pickle.dumps(schema)
1213+
1214+
# check that the pickle size is reasonable
1215+
assert len(dumped) < 25 * len(big_schema_sdl)
1216+
loaded = pickle.loads(dumped)
1217+
1218+
# check that printing the unpickled schema gives the same SDL
1219+
assert print_schema(loaded) == big_schema_sdl
1220+
1221+
# check that pickling again creates the same result
1222+
dumped = pickle.dumps(schema)
1223+
assert len(dumped) < 25 * len(big_schema_sdl)
1224+
loaded = pickle.loads(dumped)
1225+
assert print_schema(loaded) == big_schema_sdl
1226+
1227+
def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811
1228+
# use our printing conventions
1229+
big_schema_sdl = cycle_sdl(big_schema_sdl)
1230+
1231+
# create a schema from the kitchen sink SDL
1232+
schema = build_schema(big_schema_sdl, assume_valid_sdl=True)
1233+
# pickle and unpickle the schema
1234+
loaded = pickle.loads(pickle.dumps(schema))
1235+
# create a deepcopy of the unpickled schema
1236+
copied = deepcopy(loaded)
1237+
# check that printing the copied schema gives the same SDL
1238+
assert print_schema(copied) == big_schema_sdl

0 commit comments

Comments
 (0)