
Commit 2abf79f

Test
Committed-by: acezen from Dev container
1 parent: 39a1ced · commit: 2abf79f

File tree

  python/graphscope/dataset/ldbc.py
  python/graphscope/tests/unittest/test_graphar.py
  python/setup.py

3 files changed, with 14 additions and 226 deletions

python/graphscope/dataset/ldbc.py  (-217)

@@ -81,22 +81,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
             ["creationDate", "locationIP", "browserUsed", "content", "length"],
             "id",
         ),
-        "organisation": (
-            Loader(
-                os.path.join(prefix, "organisation_0_0.csv"),
-                header_row=True,
-                delimiter="|",
-            ),
-            ["type", "name", "url"],
-            "id",
-        ),
-        "tagclass": (
-            Loader(
-                os.path.join(prefix, "tagclass_0_0.csv"), header_row=True, delimiter="|"
-            ),
-            ["name", "url"],
-            "id",
-        ),
         "person": (
             Loader(
                 os.path.join(prefix, "person_0_0.csv"), header_row=True, delimiter="|"
@@ -112,20 +96,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
             ],
             "id",
         ),
-        "forum": (
-            Loader(
-                os.path.join(prefix, "forum_0_0.csv"), header_row=True, delimiter="|"
-            ),
-            ["title", "creationDate"],
-            "id",
-        ),
-        "place": (
-            Loader(
-                os.path.join(prefix, "place_0_0.csv"), header_row=True, delimiter="|"
-            ),
-            ["name", "url", "type"],
-            "id",
-        ),
         "post": (
             Loader(
                 os.path.join(prefix, "post_0_0.csv"), header_row=True, delimiter="|"
@@ -141,11 +111,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
             ],
             "id",
         ),
-        "tag": (
-            Loader(os.path.join(prefix, "tag_0_0.csv"), header_row=True, delimiter="|"),
-            ["name", "url"],
-            "id",
-        ),
     }
     edges = {
         "replyOf": [
@@ -170,62 +135,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
                 ("Post.id", "post"),
             ),
         ],
-        "isPartOf": [
-            (
-                Loader(
-                    os.path.join(prefix, "place_isPartOf_place_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Place.id", "place"),
-                ("Place.id.1", "place"),
-            )
-        ],
-        "isSubclassOf": [
-            (
-                Loader(
-                    os.path.join(prefix, "tagclass_isSubclassOf_tagclass_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("TagClass.id", "tagclass"),
-                ("TagClass.id.1", "tagclass"),
-            )
-        ],
-        "hasTag": [
-            (
-                Loader(
-                    os.path.join(prefix, "forum_hasTag_tag_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Forum.id", "forum"),
-                ("Tag.id", "tag"),
-            ),
-            (
-                Loader(
-                    os.path.join(prefix, "comment_hasTag_tag_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Comment.id", "comment"),
-                ("Tag.id", "tag"),
-            ),
-            (
-                Loader(
-                    os.path.join(prefix, "post_hasTag_tag_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Post.id", "post"),
-                ("Tag.id", "tag"),
-            ),
-        ],
         "knows": [
             (
                 Loader(
@@ -238,84 +147,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
                 ("Person.id.1", "person"),
             )
         ],
-        "hasModerator": [
-            (
-                Loader(
-                    os.path.join(prefix, "forum_hasModerator_person_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Forum.id", "forum"),
-                ("Person.id", "person"),
-            )
-        ],
-        "hasInterest": [
-            (
-                Loader(
-                    os.path.join(prefix, "person_hasInterest_tag_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Person.id", "person"),
-                ("Tag.id", "tag"),
-            )
-        ],
-        "isLocatedIn": [
-            (
-                Loader(
-                    os.path.join(prefix, "post_isLocatedIn_place_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Post.id", "post"),
-                ("Place.id", "place"),
-            ),
-            (
-                Loader(
-                    os.path.join(prefix, "comment_isLocatedIn_place_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Comment.id", "comment"),
-                ("Place.id", "place"),
-            ),
-            (
-                Loader(
-                    os.path.join(prefix, "organisation_isLocatedIn_place_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Organisation.id", "organisation"),
-                ("Place.id", "place"),
-            ),
-            (
-                Loader(
-                    os.path.join(prefix, "person_isLocatedIn_place_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Person.id", "person"),
-                ("Place.id", "place"),
-            ),
-        ],
-        "hasType": [
-            (
-                Loader(
-                    os.path.join(prefix, "tag_hasType_tagclass_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Tag.id", "tag"),
-                ("TagClass.id", "tagclass"),
-            )
-        ],
         "hasCreator": [
             (
                 Loader(
@@ -338,42 +169,6 @@ def load_ldbc(sess=None, prefix=None, directed=True):
                 ("Person.id", "person"),
             ),
         ],
-        "containerOf": [
-            (
-                Loader(
-                    os.path.join(prefix, "forum_containerOf_post_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                [],
-                ("Forum.id", "forum"),
-                ("Post.id", "post"),
-            )
-        ],
-        "hasMember": [
-            (
-                Loader(
-                    os.path.join(prefix, "forum_hasMember_person_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                ["joinDate"],
-                ("Forum.id", "forum"),
-                ("Person.id", "person"),
-            )
-        ],
-        "workAt": [
-            (
-                Loader(
-                    os.path.join(prefix, "person_workAt_organisation_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                ["workFrom"],
-                ("Person.id", "person"),
-                ("Organisation.id", "organisation"),
-            )
-        ],
         "likes": [
             (
                 Loader(
@@ -396,17 +191,5 @@ def load_ldbc(sess=None, prefix=None, directed=True):
                 ("Post.id", "post"),
             ),
         ],
-        "studyAt": [
-            (
-                Loader(
-                    os.path.join(prefix, "person_studyAt_organisation_0_0.csv"),
-                    header_row=True,
-                    delimiter="|",
-                ),
-                ["classYear"],
-                ("Person.id", "person"),
-                ("Organisation.id", "organisation"),
-            )
-        ],
     }
     return sess.load_from(edges, vertices, directed, generate_eid=True, retain_oid=True)
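
For reference, a minimal, hypothetical usage sketch of the trimmed loader. It assumes a local GraphScope session and LDBC CSV files under a placeholder prefix; after this change only the comment, person, and post vertex labels and the replyOf, knows, hasCreator, and likes edge labels are loaded.

    import graphscope
    from graphscope.dataset.ldbc import load_ldbc

    # Assumption: a local "hosts" deployment; any running GraphScope session would do.
    sess = graphscope.session(cluster_type="hosts")
    # "/path/to/ldbc_sample" is a placeholder for the directory holding the *_0_0.csv files.
    graph = load_ldbc(sess, prefix="/path/to/ldbc_sample", directed=True)
    print(graph.schema)  # comment / person / post plus the four remaining edge labels
    sess.close()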

python/graphscope/tests/unittest/test_graphar.py  (+13 -8)

@@ -19,26 +19,31 @@
 import os
 
 from graphscope.framework.graph import Graph
+from graphscope import pagerank
 
 graphar_test_repo_dir = os.path.expandvars("${GS_TEST_DIR}")
 
 
 def test_load_from_graphar(graphscope_session):
-    graph_yaml = os.path.join(
-        graphar_test_repo_dir, "graphar/ldbc_sample/parquet/ldbc_sample.graph.yml"
-    )
-    graph_yaml_path = "graphar+file://" + graph_yaml
+    # graph_yaml = os.path.join(
+    #     graphar_test_repo_dir, "graphar/ldbc_sample/parquet/ldbc_sample.graph.yml"
+    # )
+    # graph_yaml_path = "graphar+file://" + graph_yaml
+    graph_yaml_path = "graphar+file:///tmp/graphar/ldbc_sample.graph.yml"
     print(graph_yaml_path)
     g = Graph.load_from(graph_yaml_path, graphscope_session)
+    # ldbc_simple = g.project(vertices={"person": []}, edges={"knows": []})
+    # ctx = pagerank(ldbc_simple)
+    # print(ctx.to_dataframe({"id": "v.id", "value": "r"}))
     assert g.schema is not None
     del g
 
 
 def test_save_to_graphar(ldbc_graph):
     graphar_options = {
         "graph_name": "ldbc_sample",
-        "file_type": "orc",
-        "vertex_block_size": 256,
-        "edge_block_size": 1024,
+        "file_type": "parquet",
+        "vertex_block_size": 500,
+        "edge_block_size": 500,
     }
-    ldbc_graph.save_to("/tmp/", format="graphar", graphar_options=graphar_options)
+    ldbc_graph.save_to("/tmp/graphar/", format="graphar", graphar_options=graphar_options)
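
Taken together, the two tests now exercise a GraphAr round trip through /tmp/graphar/. A rough sketch of that flow, assuming the graphscope_session and ldbc_graph pytest fixtures used above, and assuming save_to writes an ldbc_sample.graph.yml descriptor under that directory (as the hard-coded path in test_load_from_graphar suggests):

    from graphscope.framework.graph import Graph

    # Save the LDBC graph as GraphAr parquet chunks under /tmp/graphar/
    # (option values mirror the updated test).
    graphar_options = {
        "graph_name": "ldbc_sample",
        "file_type": "parquet",
        "vertex_block_size": 500,
        "edge_block_size": 500,
    }
    ldbc_graph.save_to("/tmp/graphar/", format="graphar", graphar_options=graphar_options)

    # Load it back through the graphar+file:// URI scheme used by the test.
    g = Graph.load_from("graphar+file:///tmp/graphar/ldbc_sample.graph.yml", graphscope_session)
    assert g.schema is not None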

python/setup.py  (+1 -1)

@@ -314,7 +314,7 @@ def parse_version(root, **kwargs):
     package_dir=resolve_graphscope_package_dir(),
     packages=find_graphscope_packages(),
     package_data=parsed_package_data(),
-    ext_modules=build_learning_engine(),
+    # ext_modules=build_learning_engine(),
     cmdclass={
         "build_ext": CustomBuildExt,
         "build_proto": BuildProto,
