    RequestDataTooBig,
    SuspiciousMultipartForm,
    TooManyFieldsSent,
+    TooManyFilesSent,
)
from django.core.files.uploadhandler import SkipFile, StopFutureHandlers, StopUpload
from django.utils.datastructures import MultiValueDict
@@ -39,6 +40,7 @@ class InputStreamExhausted(Exception):
RAW = "raw"
FILE = "file"
FIELD = "field"
+FIELD_TYPES = frozenset([FIELD, RAW])


class MultiPartParser:
@@ -111,6 +113,22 @@ def __init__(self, META, input_data, upload_handlers, encoding=None):
        self._upload_handlers = upload_handlers

    def parse(self):
+        # Call the actual parse routine and close all open files in case of
+        # errors. This is needed because if exceptions are thrown the
+        # MultiPartParser will not be garbage collected immediately and
+        # resources would be kept alive. This is only needed for errors because
+        # the Request object closes all uploaded files at the end of the
+        # request.
+        try:
+            return self._parse()
+        except Exception:
+            if hasattr(self, "_files"):
+                for _, files in self._files.lists():
+                    for fileobj in files:
+                        fileobj.close()
+            raise
+
+    def _parse(self):
        """
        Parse the POST data and break it into a FILES MultiValueDict and a POST
        MultiValueDict.
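
(Not part of the patch.) A minimal, self-contained sketch of the cleanup contract the new `parse()` wrapper introduces: if the real parse routine raises after some uploads were already stored in `_files`, every stored file object is closed before the exception propagates. The `_FakeUpload` and `_ExplodingParser` names below are made up for illustration; only `MultiValueDict` comes from Django.

```python
from django.utils.datastructures import MultiValueDict


class _FakeUpload:
    """Stand-in for an uploaded file object; only close() matters here."""

    def __init__(self):
        self.closed = False

    def close(self):
        self.closed = True


class _ExplodingParser:
    """Mimics the wrapper above: run _parse() and close stored files on error."""

    def __init__(self):
        self._files = MultiValueDict({"attachments": [_FakeUpload(), _FakeUpload()]})

    def _parse(self):
        raise RuntimeError("simulated failure mid-parse")

    def parse(self):
        try:
            return self._parse()
        except Exception:
            if hasattr(self, "_files"):
                for _, files in self._files.lists():
                    for fileobj in files:
                        fileobj.close()
            raise


parser = _ExplodingParser()
try:
    parser.parse()
except RuntimeError:
    pass
# Both stored uploads were closed before the exception escaped.
assert all(f.closed for f in parser._files.getlist("attachments"))
```
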
@@ -156,6 +174,8 @@ def parse(self):
        num_bytes_read = 0
        # To count the number of keys in the request.
        num_post_keys = 0
+        # To count the number of files in the request.
+        num_files = 0
        # To limit the amount of data read from the request.
        read_size = None
        # Whether a file upload is finished.
@@ -171,6 +191,20 @@ def parse(self):
                    old_field_name = None
                    uploaded_file = True

+                if (
+                    item_type in FIELD_TYPES
+                    and settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
+                ):
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
+                    num_post_keys += 1
+                    # 2 accounts for empty raw fields before and after the
+                    # last boundary.
+                    if settings.DATA_UPLOAD_MAX_NUMBER_FIELDS + 2 < num_post_keys:
+                        raise TooManyFieldsSent(
+                            "The number of GET/POST parameters exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
+                        )
+
                try:
                    disposition = meta_data["content-disposition"][1]
                    field_name = disposition["name"].strip()
@@ -183,17 +217,6 @@ def parse(self):
                field_name = force_str(field_name, encoding, errors="replace")

                if item_type == FIELD:
-                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
-                    num_post_keys += 1
-                    if (
-                        settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
-                        and settings.DATA_UPLOAD_MAX_NUMBER_FIELDS < num_post_keys
-                    ):
-                        raise TooManyFieldsSent(
-                            "The number of GET/POST parameters exceeded "
-                            "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
-                        )
-
                    # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
                    if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
                        read_size = (
@@ -228,6 +251,16 @@ def parse(self):
                            field_name, force_str(data, encoding, errors="replace")
                        )
                elif item_type == FILE:
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FILES.
+                    num_files += 1
+                    if (
+                        settings.DATA_UPLOAD_MAX_NUMBER_FILES is not None
+                        and num_files > settings.DATA_UPLOAD_MAX_NUMBER_FILES
+                    ):
+                        raise TooManyFilesSent(
+                            "The number of files exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FILES."
+                        )
                    # This is a file, use the handler...
                    file_name = disposition.get("filename")
                    if file_name:
@@ -305,8 +338,13 @@ def parse(self):
                    # Handle file upload completions on next iteration.
                    old_field_name = field_name
                else:
-                    # If this is neither a FIELD or a FILE, just exhaust the stream.
-                    exhaust(stream)
+                    # If this is neither a FIELD nor a FILE, exhaust the field
+                    # stream. Note: There could be an error here at some point,
+                    # but there will be at least two RAW types (before and
+                    # after the other boundaries). This branch is usually not
+                    # reached at all, because a missing content-disposition
+                    # header will skip the whole boundary.
+                    exhaust(field_stream)
        except StopUpload as e:
            self._close_files()
            if not e.connection_reset:
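
(Not part of the patch.) A rough sketch of exercising the new limit end to end: it builds a multipart body with three file parts and parses it with `DATA_UPLOAD_MAX_NUMBER_FILES` set to 2, expecting `TooManyFilesSent` on the third file. It assumes a Django release that already includes this change; the boundary string and field names are arbitrary.

```python
import io

from django.conf import settings
from django.core.exceptions import TooManyFilesSent
from django.core.files.uploadhandler import MemoryFileUploadHandler
from django.http.multipartparser import MultiPartParser

# Deliberately tiny limit so three files trip it.
settings.configure(DATA_UPLOAD_MAX_NUMBER_FILES=2)

boundary = "boundary"


def part(i):
    # One small text file per part; names and filenames are arbitrary.
    return (
        f"--{boundary}\r\n"
        f'Content-Disposition: form-data; name="file{i}"; filename="file{i}.txt"\r\n'
        "Content-Type: text/plain\r\n"
        "\r\n"
        "x\r\n"
    )


body = ("".join(part(i) for i in range(3)) + f"--{boundary}--\r\n").encode()
meta = {
    "CONTENT_TYPE": f"multipart/form-data; boundary={boundary}",
    "CONTENT_LENGTH": str(len(body)),
}

parser = MultiPartParser(meta, io.BytesIO(body), [MemoryFileUploadHandler()])
try:
    parser.parse()
except TooManyFilesSent as exc:
    # The third file exceeds the limit of 2; already-parsed files were closed.
    print("Rejected:", exc)
```

Since `TooManyFilesSent` is a `SuspiciousOperation` subclass, in normal request handling it surfaces as a 400 response rather than an unhandled server error.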