1
+ from __future__ import unicode_literals
2
+
3
+ from future .standard_library import install_aliases
4
+ install_aliases ()
5
+
1
6
import anyjson
2
- import httplib
3
- import urllib
4
- import urlparse
5
7
import base64
6
8
import time
7
9
import hashlib
10
+ from urllib .parse import urlparse , urlunparse , urlencode
11
+
12
+ try :
13
+ # VCRpy only works when `httplib` is imported directly on Python 2.x
14
+ import httplib
15
+ except ImportError :
16
+ import http .client as httplib
8
17
9
- from version import VERSION
18
+ from . version import VERSION
10
19
11
20
USER_AGENT = 'Swiftype-Python/' + VERSION
12
21
DEFAULT_API_HOST = 'api.swiftype.com'
@@ -80,41 +89,41 @@ def destroy_documents(self, engine_id, document_type_id, document_ids=[]):
80
89
81
90
def search(self, engine_id, query, options=None):
    """Run a search query against every document type of an engine.

    :param engine_id: slug or id of the engine to search.
    :param query: the query string (sent as the ``q`` parameter).
    :param options: optional dict of extra query parameters; on a key
        collision (e.g. ``'q'``) the caller-supplied value wins, matching
        the previous merge order.
    :return: whatever ``self.conn._get`` returns for the search endpoint.
    """
    # ``options=None`` avoids the shared mutable-default-argument pitfall
    # that ``options={}`` carried; behavior for all existing callers is
    # unchanged.
    full_query = {'q': query}
    full_query.update(options or {})
    return self.conn._get(self.__search_path(engine_id), data=full_query)
def search_document_type(self, engine_id, document_type_id, query, options=None):
    """Run a search query scoped to a single document type.

    :param engine_id: slug or id of the engine.
    :param document_type_id: slug or id of the document type to search.
    :param query: the query string (sent as the ``q`` parameter).
    :param options: optional dict of extra query parameters; caller-supplied
        keys override the generated ``q``, matching the previous merge order.
    :return: whatever ``self.conn._get`` returns for the endpoint.
    """
    # ``options=None`` avoids the shared mutable-default-argument pitfall.
    full_query = {'q': query}
    full_query.update(options or {})
    return self.conn._get(self.__document_type_search_path(engine_id, document_type_id), data=full_query)
def suggest(self, engine_id, query, options=None):
    """Run an autocomplete (suggest) query across all document types.

    :param engine_id: slug or id of the engine.
    :param query: the prefix query string (sent as the ``q`` parameter).
    :param options: optional dict of extra query parameters; caller-supplied
        keys override the generated ``q``, matching the previous merge order.
    :return: whatever ``self.conn._get`` returns for the suggest endpoint.
    """
    # ``options=None`` avoids the shared mutable-default-argument pitfall.
    full_query = {'q': query}
    full_query.update(options or {})
    return self.conn._get(self.__suggest_path(engine_id), data=full_query)
def suggest_document_type(self, engine_id, document_type_id, query, options=None):
    """Run an autocomplete (suggest) query scoped to one document type.

    :param engine_id: slug or id of the engine.
    :param document_type_id: slug or id of the document type.
    :param query: the prefix query string (sent as the ``q`` parameter).
    :param options: optional dict of extra query parameters; caller-supplied
        keys override the generated ``q``, matching the previous merge order.
    :return: whatever ``self.conn._get`` returns for the endpoint.
    """
    # ``options=None`` avoids the shared mutable-default-argument pitfall.
    full_query = {'q': query}
    full_query.update(options or {})
    return self.conn._get(self.__document_type_suggest_path(engine_id, document_type_id), data=full_query)
def analytics_searches(self, engine_id, start_date=None, end_date=None):
    """Fetch search analytics for an engine, optionally bounded by date.

    Only the date bounds actually supplied are forwarded as parameters.
    """
    window = {'start_date': start_date, 'end_date': end_date}
    params = {name: value for name, value in window.items() if value is not None}
    return self.conn._get(self.__analytics_path(engine_id) + '/searches', params)
def analytics_autoselects(self, engine_id, start_date=None, end_date=None):
    """Fetch autoselect analytics for an engine, optionally bounded by date.

    Only the date bounds actually supplied are forwarded as parameters.
    """
    window = {'start_date': start_date, 'end_date': end_date}
    params = {name: value for name, value in window.items() if value is not None}
    return self.conn._get(self.__analytics_path(engine_id) + '/autoselects', params)
def analytics_top_queries(self, engine_id, page=None, per_page=None):
    """Fetch the top queries report for an engine, with optional pagination."""
    pagination = self.__pagination_params(page, per_page)
    return self.conn._get(self.__analytics_path(engine_id) + '/top_queries', pagination)
def analytics_top_queries_in_range(self, engine_id, start_date=None, end_date=None):
    """Fetch the top queries report for a date range.

    Only the date bounds actually supplied are forwarded as parameters.
    """
    window = {'start_date': start_date, 'end_date': end_date}
    params = {name: value for name, value in window.items() if value is not None}
    return self.conn._get(self.__analytics_path(engine_id) + '/top_queries_in_range', params)
def analytics_top_no_result_queries(self, engine_id, start_date=None, end_date=None):
    """Fetch the top no-result queries report for a date range.

    Only the date bounds actually supplied are forwarded as parameters.
    NOTE: the endpoint path is '/top_no_result_queries_in_range' even though
    the method name omits the '_in_range' suffix (kept for compatibility).
    """
    window = {'start_date': start_date, 'end_date': end_date}
    params = {name: value for name, value in window.items() if value is not None}
    return self.conn._get(self.__analytics_path(engine_id) + '/top_no_result_queries_in_range', params)
def domains (self , engine_id ):
@@ -137,7 +146,7 @@ def crawl_url(self, engine_id, domain_id, url):
137
146
138
147
def users(self, page=None, per_page=None):
    """List users, authenticating with the client credentials.

    Pagination parameters are included only when supplied.
    """
    query = {'client_id': self.client_id, 'client_secret': self.client_secret}
    query.update(self.__pagination_params(page, per_page))
    return self.conn._get(self.__users_path(), query)
def user (self , user_id ):
143
152
params = {'client_id' : self .client_id , 'client_secret' : self .client_secret }
@@ -150,10 +159,12 @@ def create_user(self):
150
159
def sso_url(self, user_id):
    """Build the single-sign-on URL for *user_id* on swiftype.com.

    The URL carries the user id, client id, current timestamp, and the
    SSO token binding them together.
    """
    ts = self._get_timestamp()
    token = self._sso_token(user_id, ts)
    query = urlencode({'user_id': user_id, 'client_id': self.client_id, 'timestamp': ts, 'token': token})
    return urlunparse(('https', 'swiftype.com', '/sso', '', query, ''))
def _sso_token(self, user_id, timestamp):
    """Compute the SSO token: SHA-1 of 'user_id:client_secret:timestamp'.

    The payload is UTF-8 encoded before hashing (required on Python 3,
    harmless on Python 2).
    """
    payload = '%s:%s:%s' % (user_id, self.client_secret, timestamp)
    return hashlib.sha1(payload.encode('utf-8')).hexdigest()
def _get_timestamp(self):
    """Return the current Unix time, truncated to whole seconds."""
    now = time.time()
    return int(now)
@@ -175,7 +186,7 @@ def __users_path(self): return 'users'
175
186
def __user_path(self, user_id):
    """Relative API path for a single user resource."""
    return 'users/%s' % user_id
176
187
177
188
def __pagination_params(self, page, per_page):
    """Build a query-parameter dict containing only the pagination values
    that were actually provided (i.e. are not None)."""
    candidates = {'page': page, 'per_page': per_page}
    return {name: value for name, value in candidates.items() if value is not None}
class HttpException (Exception ):
181
192
def __init__ (self , status , msg ):
@@ -224,7 +235,7 @@ def _request(self, method, path, params={}, data={}):
224
235
raise Unauthorized ('Authorization required.' )
225
236
226
237
full_path = self .__base_path + path + '.json'
227
- query = urllib . urlencode (params , True )
238
+ query = urlencode (params , True )
228
239
if query :
229
240
full_path += '?' + query
230
241
@@ -235,11 +246,11 @@ def _request(self, method, path, params={}, data={}):
235
246
236
247
response = connection .getresponse ()
237
248
response .body = response .read ()
238
- if (response .status / 100 == 2 ):
249
+ if (response .status // 100 == 2 ):
239
250
if response .body :
240
251
try :
241
- response .body = anyjson .deserialize (response .body )
242
- except ValueError , e :
252
+ response .body = anyjson .deserialize (response .body . decode ( 'utf-8' ) )
253
+ except ValueError as e :
243
254
raise InvalidResponseFromServer ('The JSON response could not be parsed: %s.\n %s' % (e , response .body ))
244
255
ret = {'status' : response .status , 'body' :response .body }
245
256
else :
0 commit comments