from score import *
from src.main import *
import logging
import shutil
from datetime import datetime
from src.QA_integration_new import QA_RAG
from langserve import add_routes
import asyncio
import os

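# Neo4j connection settings and LLM model name; fill in uri, userName, password and database before running these checks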
uri = ''
userName = ''
password = ''
model = 'OpenAI GPT 3.5'
database = ''
CHUNK_DIR = os.path.join(os.path.dirname(__file__), "chunks")
MERGED_DIR = os.path.join(os.path.dirname(__file__), "merged_files")
graph = create_graph_database_connection(uri, userName, password, database)

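# Check for local file extraction to be successful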
def test_graph_from_file_local_file():
    file_name = 'About Amazon.pdf'
    #shutil.copyfile('data/Bank of America Q23.pdf', 'backend/src/merged_files/Bank of America Q23.pdf')
    shutil.copyfile('/workspaces/llm-graph-builder/data/About Amazon.pdf', '/workspaces/llm-graph-builder/backend/merged_files/About Amazon.pdf')
    obj_source_node = sourceNode()
    obj_source_node.file_name = file_name
    obj_source_node.file_type = 'pdf'
    obj_source_node.file_size = '1087'
    obj_source_node.file_source = 'local file'
    obj_source_node.model = model
    obj_source_node.created_at = datetime.now()
    graphDb_data_Access = graphDBdataAccess(graph)
    graphDb_data_Access.create_source_node(obj_source_node)
    merged_file_path = os.path.join(MERGED_DIR, file_name)

    local_file_result = extract_graph_from_file_local_file(graph, model, file_name, merged_file_path, '', '')

    print(local_file_result)

    logging.info("Info: ")
    try:
        assert local_file_result['status'] == 'Completed' and local_file_result['nodeCount'] > 5 and local_file_result['relationshipCount'] > 10
        print("Success")
    except AssertionError as e:
        print("Fail: ", e)

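# Check for local file extraction to fail when the source file was never uploaded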
def test_graph_from_file_local_file_failed():
    file_name = 'Not_exist.pdf'
    try:
        obj_source_node = sourceNode()
        obj_source_node.file_name = file_name
        obj_source_node.file_type = 'pdf'
        obj_source_node.file_size = '0'
        obj_source_node.file_source = 'local file'
        obj_source_node.model = model
        obj_source_node.created_at = datetime.now()
        graphDb_data_Access = graphDBdataAccess(graph)
        graphDb_data_Access.create_source_node(obj_source_node)
        merged_file_path = os.path.join(MERGED_DIR, file_name)

        local_file_result = extract_graph_from_file_local_file(graph, model, file_name, merged_file_path, '', '')

        print(local_file_result)
    except Exception as e:
        # The extract call raises because the file does not exist on the server
        print('Failed because the file does not exist: it was never uploaded or was accidentally deleted from the server')
        print("Failed: Error from extract function ", e)

# Check for Wikipedia extraction to be successful
def test_graph_from_Wikipedia():
    wiki_query = 'Norway'
    source_type = 'Wikipedia'
    create_source_node_graph_url_wikipedia(graph, model, wiki_query, source_type)
    wikiresult = extract_graph_from_file_Wikipedia(graph, model, wiki_query, 1, '', '')
    logging.info("Info: Wikipedia test done")
    print(wikiresult)
    try:
        assert wikiresult['status'] == 'Completed' and wikiresult['nodeCount'] > 10 and wikiresult['relationshipCount'] > 15
        print("Success")
    except AssertionError as e:
        print("Fail ", e)

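# Check for Wikipedia source node creation to fail for a query with no matching page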
def test_graph_from_Wikipedia_failed():
    wiki_query = 'Test QA 123456'
    source_type = 'Wikipedia'
    try:
        logging.info("Created source node for wikipedia")
        create_source_node_graph_url_wikipedia(graph, model, wiki_query, source_type)
    except AssertionError as e:
        print("Fail ", e)


# Check for YouTube video extraction to be successful
def test_graph_from_youtube_video():
    url = 'https://www.youtube.com/watch?v=T-qy-zPWgqA'
    source_type = 'youtube'

    create_source_node_graph_url_youtube(graph, model, url, source_type)
    youtuberesult = extract_graph_from_file_youtube(graph, model, url, '', '')

    logging.info("Info: Youtube Video test done")
    print(youtuberesult)
    try:
        assert youtuberesult['status'] == 'Completed' and youtuberesult['nodeCount'] > 60 and youtuberesult['relationshipCount'] > 40
        print("Success")
    except AssertionError as e:
        print("Failed ", e)

# Check for YouTube video extraction to fail
def test_graph_from_youtube_video_failed():
    url = 'https://www.youtube.com/watch?v=U9mJuUkhUzk'
    source_type = 'youtube'

    create_source_node_graph_url_youtube(graph, model, url, source_type)
    youtuberesult = extract_graph_from_file_youtube(graph, model, url, ',', ',')
    # print(result)
    print(youtuberesult)
    try:
        assert youtuberesult['status'] == 'Completed'
        print("Success")
    except AssertionError as e:
        print("Failed ", e)

# Check for the GCS file to be uploaded, processed and completed
def test_graph_from_file_test_gcs():

    bucket_name = 'llm_graph_transformer_test'
    folder_name = 'technology'
    source_type = 'gcs bucket'
    file_name = 'Neuralink brain chip patient playing chess.pdf'
    create_source_node_graph_url_gcs(graph, model, bucket_name, folder_name, source_type)
    gcsresult = extract_graph_from_file_gcs(graph, model, bucket_name, folder_name, file_name, '', '')

    logging.info("Info")
    print(gcsresult)

    try:
        assert gcsresult['status'] == 'Completed' and gcsresult['nodeCount'] > 10 and gcsresult['relationshipCount'] > 5
        print("Success")
    except AssertionError as e:
        print("Failed ", e)

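# Check for GCS source node creation to fail when the bucket does not exist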
def test_graph_from_file_test_gcs_failed():

    bucket_name = 'llm_graph_transformer_neo'
    folder_name = 'technology'
    source_type = 'gcs bucket'
    # file_name = 'Neuralink brain chip patient playing chess.pdf'
    try:
        create_source_node_graph_url_gcs(graph, model, bucket_name, folder_name, source_type)
        print("GCS: Create source node failed because the bucket does not exist")
    except AssertionError as e:
        print("Failed ", e)

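# Check for S3 source node creation to fail with a wrong access key id and secret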
def test_graph_from_file_test_s3_failed():
    source_url = 's3://development-llm-graph-builder-models/'
    try:
        create_source_node_graph_url_s3(graph, model, source_url, 'test123', 'pwd123')
        # assert result['status'] == 'Failed'
        # print("S3 created source node failed due to wrong access key id and secret")
    except AssertionError as e:
        print("Failed ", e)

# Check the Functionality of Chatbot QnA
def test_chatbot_QnA():
    QA_n_RAG = QA_RAG(graph, model, 'who is patrick pichette', 1)

    print(QA_n_RAG)
    print(len(QA_n_RAG['message']))
    try:
        assert len(QA_n_RAG['message']) > 20
        print("Success")
    except AssertionError as e:
        print("Failed ", e)


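# Run the success-path checks and the chatbot QnA check when this file is executed as a script;
# the failure-path checks stay commented out and can be enabled as needed.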
if __name__ == "__main__":

    test_graph_from_file_local_file()          # local file Success Test Case
    #test_graph_from_file_local_file_failed()   # local file Failed Test Case

    test_graph_from_Wikipedia()                # Wikipedia Success Test Case
    #test_graph_from_Wikipedia_failed()         # Wikipedia Failed Test Case

    test_graph_from_youtube_video()            # Youtube Success Test Case
    #test_graph_from_youtube_video_failed()     # Youtube Failed Test Case

    test_graph_from_file_test_gcs()            # GCS Success Test Case
    test_chatbot_QnA()                         # Chatbot QnA Test Case

    #test_graph_from_file_test_s3_failed()      # S3 Failed Test Case