@@ -62,7 +62,8 @@ def process_entities(entities):
62
62
@click .option ('--max-token-count' , '-c' , default = 1024 , help = 'max number of processed CSVs to send per query (default 1024)' )
63
63
@click .option ('--max-buffer-size' , '-b' , default = 2048 , help = 'max buffer size in megabytes (default 2048)' )
64
64
@click .option ('--max-token-size' , '-t' , default = 500 , help = 'max size of each token in megabytes (default 500, max 512)' )
65
- def bulk_insert (graph , host , port , password , nodes , nodes_with_label , relations , relations_with_type , separator , enforce_schema , skip_invalid_nodes , skip_invalid_edges , quote , max_token_count , max_buffer_size , max_token_size ):
65
+ @click.option('--index', '-i', multiple=True, help='Label:Property on which to create an index')
66
+ def bulk_insert (graph , host , port , password , nodes , nodes_with_label , relations , relations_with_type , separator , enforce_schema , skip_invalid_nodes , skip_invalid_edges , quote , max_token_count , max_buffer_size , max_token_size , index ):
66
67
if sys .version_info [0 ] < 3 :
67
68
raise Exception ("Python 3 is required for the RedisGraph bulk loader." )
68
69
@@ -115,6 +116,16 @@ def bulk_insert(graph, host, port, password, nodes, nodes_with_label, relations,
115
116
end_time = timer ()
116
117
query_buf .report_completion (end_time - start_time )
117
118
119
+ for i in index :
120
+ l , p = i .split (":" )
121
+ print ("Creating Index on Label: %s, Property: %s" % (l , p ))
122
+ try :
123
+ index_create = client .execute_command ("GRAPH.QUERY" , graph , "CREATE INDEX ON :%s(%s)" % (l , p ))
124
+ for z in index_create :
125
+ print (z [0 ].decode ("utf-8" ) )
126
+ except redis .exceptions .ResponseError as e :
127
+ print("Unable to create Index on Label: %s, Property: %s" % (l, p))
128
+ print (e )
118
129
119
130
if __name__ == '__main__' :
120
131
bulk_insert ()
0 commit comments