# files.py
import io
import os
import mimetypes
import json
from googleapiclient import errors
from googleapiclient.http import MediaIoBaseUpload
from googleapiclient.http import MediaIoBaseDownload
from googleapiclient.http import DEFAULT_CHUNK_SIZE
from functools import wraps
from .apiattr import ApiAttribute
from .apiattr import ApiAttributeMixin
from .apiattr import ApiResource
from .apiattr import ApiResourceList
from .auth import LoadAuth
BLOCK_SIZE = 1024
# Usage: MIME_TYPE_TO_BOM['<Google Drive mime type>']['<download mimetype>'].
MIME_TYPE_TO_BOM = {
"application/vnd.google-apps.document": {"text/plain": "\ufeff".encode()}
}
class FileNotUploadedError(RuntimeError):
"""Error trying to access metadata of file that is not uploaded."""
class ApiRequestError(IOError):
def __init__(self, http_error):
assert isinstance(http_error, errors.HttpError)
content = json.loads(http_error.content.decode("utf-8"))
self.error = content.get("error", {}) if content else {}
# Initialize args for backward compatibility
super().__init__(http_error)
def GetField(self, field):
"""Returns the `field` from the first error"""
return self.error.get("errors", [{}])[0].get(field, "")
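# Illustrative sketch: callers typically catch ApiRequestError and inspect
# the wrapped Drive API error, e.g.
#
#     try:
#         drive_file.Upload()
#     except ApiRequestError as exc:
#         print(exc.error.get("code"), exc.GetField("reason"))
#
# (`drive_file` above is a hypothetical GoogleDriveFile instance.)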
class FileNotDownloadableError(RuntimeError):
"""Error trying to download file that is not downloadable."""
def LoadMetadata(decoratee):
"""Decorator to check if the file has metadata and fetches it if not.
:raises: ApiRequestError, FileNotUploadedError
"""
@wraps(decoratee)
def _decorated(self, *args, **kwargs):
if not self.uploaded:
self.FetchMetadata()
return decoratee(self, *args, **kwargs)
return _decorated
class GoogleDriveFileList(ApiResourceList):
"""Google Drive FileList instance.
Equivalent to Files.list() in Drive APIs.
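Example (minimal usage sketch; assumes an authenticated ``GoogleAuth``
instance named ``gauth``)::

    from pydrive2.drive import GoogleDrive

    drive = GoogleDrive(gauth)
    # 'q' is a regular Drive API v2 query string.
    for f in drive.ListFile(
        {"q": "'root' in parents and trashed=false"}
    ).GetList():
        print(f["title"], f["id"])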
"""
def __init__(self, auth=None, param=None):
"""Create an instance of GoogleDriveFileList."""
super().__init__(auth=auth, metadata=param)
@LoadAuth
def _GetList(self):
"""Overwritten method which actually makes API call to list files.
:returns: list -- list of pydrive2.files.GoogleDriveFile.
"""
# Teamdrive support
self["supportsAllDrives"] = True
self["includeItemsFromAllDrives"] = True
try:
self.metadata = (
self.auth.service.files()
.list(**dict(self))
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
result = []
for file_metadata in self.metadata["items"]:
tmp_file = GoogleDriveFile(
auth=self.auth, metadata=file_metadata, uploaded=True
)
result.append(tmp_file)
return result
class IoBuffer:
"""Lightweight retention of one chunk."""
def __init__(self, encoding):
self.encoding = encoding
self.chunk = None
def write(self, chunk):
self.chunk = chunk
def read(self):
return (
self.chunk.decode(self.encoding)
if self.chunk and self.encoding
else self.chunk
)
class MediaIoReadable:
def __init__(
self,
request,
encoding=None,
pre_buffer=True,
remove_prefix=b"",
chunksize=DEFAULT_CHUNK_SIZE,
):
"""File-like wrapper around MediaIoBaseDownload.
:param pre_buffer: Whether to read one chunk into an internal buffer
immediately in order to raise any potential errors.
:param remove_prefix: Bytes prefix to remove from internal pre_buffer.
:raises: ApiRequestError
"""
self.done = False
self._fd = IoBuffer(encoding)
self.downloader = MediaIoBaseDownload(
self._fd, request, chunksize=chunksize
)
self.size = None
self._pre_buffer = False
if pre_buffer:
self.read()
if remove_prefix:
chunk = io.BytesIO(self._fd.chunk)
GoogleDriveFile._RemovePrefix(chunk, remove_prefix)
self._fd.chunk = chunk.getvalue()
self._pre_buffer = True
def read(self):
"""
:returns: bytes or str -- chunk (or None if done)
:raises: ApiRequestError
"""
if self._pre_buffer:
self._pre_buffer = False
return self._fd.read()
if self.done:
return None
try:
status, self.done = self.downloader.next_chunk()
self.size = status.total_size
except errors.HttpError as error:
raise ApiRequestError(error)
return self._fd.read()
def __iter__(self):
"""
:raises: ApiRequestError
"""
while True:
chunk = self.read()
if chunk is None:
break
yield chunk
def __len__(self):
return self.size
class GoogleDriveFile(ApiAttributeMixin, ApiResource):
"""Google Drive File instance.
Inherits ApiResource which inherits dict.
Can access and modify metadata like a dictionary.
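Example (minimal sketch; ``drive`` is assumed to be an authenticated
``GoogleDrive`` instance)::

    f = drive.CreateFile({"title": "notes.txt"})  # returns a GoogleDriveFile
    f["description"] = "Plain dict-style metadata access"
    f.SetContentString("hello")
    f.Upload()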
"""
content = ApiAttribute("content")
uploaded = ApiAttribute("uploaded")
metadata = ApiAttribute("metadata")
def __init__(self, auth=None, metadata=None, uploaded=False):
"""Create an instance of GoogleDriveFile.
:param auth: authorized GoogleAuth instance.
:type auth: pydrive2.auth.GoogleAuth
:param metadata: file resource to initialize GoogleDriveFile with.
:type metadata: dict.
:param uploaded: True if this file is confirmed to be uploaded.
:type uploaded: bool.
"""
ApiAttributeMixin.__init__(self)
ApiResource.__init__(self)
self.metadata = {}
self.dirty = {"content": False}
self.auth = auth
self.uploaded = uploaded
if uploaded:
self.UpdateMetadata(metadata)
elif metadata:
self.update(metadata)
self.has_bom = True
def __getitem__(self, key):
"""Overwrites manner of accessing Files resource.
If this file instance is not uploaded and id is specified,
it will try to look for metadata with Files.get().
:param key: key of dictionary query.
:type key: str.
:returns: value of Files resource
:raises: KeyError, FileNotUploadedError
"""
try:
return dict.__getitem__(self, key)
except KeyError as e:
if self.uploaded:
raise KeyError(e)
if self.get("id"):
self.FetchMetadata()
return dict.__getitem__(self, key)
else:
raise FileNotUploadedError()
def SetContentString(self, content, encoding="utf-8"):
"""Set content of this file to be a string.
Creates an io.BytesIO instance of the encoded string (utf-8 by default).
Sets mimeType to 'text/plain' if it is not specified and the file id is
not set (i.e. the file is being uploaded for the first time).
:param encoding: The encoding to use when setting the content of this file.
:type encoding: str
:param content: content of the file in string.
:type content: str
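Example (sketch; ``drive`` is assumed to be an authenticated
``GoogleDrive`` instance)::

    f = drive.CreateFile({"title": "hello.txt"})
    f.SetContentString("Hello, world!")  # mimeType defaults to text/plain
    f.Upload()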
"""
self.content = io.BytesIO(content.encode(encoding))
if self.get("mimeType") is None and self.get("id") is None:
self["mimeType"] = "text/plain"
def SetContentFile(self, filename):
"""Set content of this file from a file.
Opens the file specified by `filename`; it will be read, uploaded, and
closed by the Upload() method.
Sets metadata 'title' and 'mimeType' automatically if not specified and
the file is uploaded for the first time (id is not set).
:param filename: name of the file to be uploaded.
:type filename: str.
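Example (sketch; ``drive`` is an authenticated ``GoogleDrive`` instance
and ``photo.jpg`` is a hypothetical local file)::

    f = drive.CreateFile()
    f.SetContentFile("photo.jpg")  # title and mimeType guessed from the name
    f.Upload()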
"""
self.content = open(filename, "rb")
if self.get("id") is None:
if self.get("title") is None:
self["title"] = os.path.basename(filename)
if self.get("mimeType") is None:
self["mimeType"] = mimetypes.guess_type(filename)[0]
def GetContentString(
self, mimetype=None, encoding="utf-8", remove_bom=False
):
"""Get content of this file as a string.
:param mimetype: The mimetype of the content string.
:type mimetype: str
:param encoding: The encoding to use when decoding the byte string.
:type encoding: str
:param remove_bom: Whether to strip a known BOM.
:type remove_bom: bool
:returns: str -- content of the file decoded with `encoding`
:raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
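Example (sketch; assumes ``file_id`` refers to a Google Docs document,
so the content is exported via its text/plain export link)::

    f = drive.CreateFile({"id": file_id})
    text = f.GetContentString(mimetype="text/plain", remove_bom=True)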
"""
if (
self.content is None
or not isinstance(self.content, io.BytesIO)
or self.has_bom == remove_bom
):
self.FetchContent(mimetype, remove_bom)
return self.content.getvalue().decode(encoding)
@LoadAuth
def GetContentFile(
self,
filename,
mimetype=None,
remove_bom=False,
callback=None,
chunksize=DEFAULT_CHUNK_SIZE,
acknowledge_abuse=False,
):
"""Save content of this file as a local file.
:param filename: name of the file to write to.
:type filename: str
:param mimetype: mimeType of the file.
:type mimetype: str
:param remove_bom: Whether to remove the byte order marking.
:type remove_bom: bool
:param callback: passed two arguments: (total transferred, file size).
:type callback: callable
:param chunksize: chunk size in bytes (default 100 MB, i.e. 1024*1024*100)
:type chunksize: int
:param acknowledge_abuse: Acknowledge the risk and download a file
identified as abusive.
:type acknowledge_abuse: bool
:raises: ApiRequestError, FileNotUploadedError
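Example (sketch; ``file_id`` is a hypothetical Drive file id)::

    f = drive.CreateFile({"id": file_id})
    f.GetContentFile(
        "backup.pdf",
        callback=lambda done, total: print(f"{done}/{total} bytes"),
    )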
"""
files = self.auth.service.files()
file_id = self.metadata.get("id") or self.get("id")
if not file_id:
raise FileNotUploadedError()
def download(fd, request):
downloader = MediaIoBaseDownload(
fd, self._WrapRequest(request), chunksize=chunksize
)
done = False
while done is False:
status, done = downloader.next_chunk()
if callback:
callback(status.resumable_progress, status.total_size)
with open(filename, mode="w+b") as fd:
# Should use files.export_media instead of files.get_media if
# metadata["mimeType"].startswith("application/vnd.google-apps.").
# But that would first require a slow call to FetchMetadata().
# We prefer to try-except for speed.
try:
download(
fd,
files.get_media(
fileId=file_id, acknowledgeAbuse=acknowledge_abuse
),
)
except errors.HttpError as error:
exc = ApiRequestError(error)
if (
exc.error["code"] != 403
or exc.GetField("reason") != "fileNotDownloadable"
):
raise exc
mimetype = mimetype or "text/plain"
fd.seek(0) # just in case `download()` modified `fd`
try:
download(
fd,
files.export_media(fileId=file_id, mimeType=mimetype),
)
except errors.HttpError as error:
raise ApiRequestError(error)
if mimetype == "text/plain" and remove_bom:
fd.seek(0)
bom = self._GetBOM(mimetype)
if bom:
self._RemovePrefix(fd, bom)
@LoadAuth
def GetContentIOBuffer(
self,
mimetype=None,
encoding=None,
remove_bom=False,
chunksize=DEFAULT_CHUNK_SIZE,
acknowledge_abuse=False,
):
"""Get a file-like object which has a buffered read() method.
:param mimetype: mimeType of the file.
:type mimetype: str
:param encoding: The encoding to use when decoding the byte string.
:type encoding: str
:param remove_bom: Whether to remove the byte order marking.
:type remove_bom: bool
:param chunksize: default read()/iter() chunksize.
:type chunksize: int
:param acknowledge_abuse: Acknowledge the risk and download a file
identified as abusive.
:type acknowledge_abuse: bool
:returns: MediaIoReadable -- file-like object.
:raises: ApiRequestError, FileNotUploadedError
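Example (sketch; streams a large file chunk by chunk without holding it
all in memory; ``file_id`` is a hypothetical Drive file id)::

    f = drive.CreateFile({"id": file_id})
    buffer = f.GetContentIOBuffer(chunksize=1024 * 1024)
    with open("large.bin", "wb") as out:
        for chunk in buffer:
            out.write(chunk)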
"""
files = self.auth.service.files()
file_id = self.metadata.get("id") or self.get("id")
if not file_id:
raise FileNotUploadedError()
# Should use files.export_media instead of files.get_media if
# metadata["mimeType"].startswith("application/vnd.google-apps.").
# But that would first require a slow call to FetchMetadata().
# We prefer to try-except for speed.
try:
request = self._WrapRequest(
files.get_media(
fileId=file_id, acknowledgeAbuse=acknowledge_abuse
)
)
return MediaIoReadable(
request, encoding=encoding, chunksize=chunksize
)
except ApiRequestError as exc:
if (
exc.error["code"] != 403
or exc.GetField("reason") != "fileNotDownloadable"
):
raise exc
mimetype = mimetype or "text/plain"
request = self._WrapRequest(
files.export_media(fileId=file_id, mimeType=mimetype)
)
remove_prefix = (
self._GetBOM(mimetype)
if mimetype == "text/plain" and remove_bom
else b""
)
return MediaIoReadable(
request,
encoding=encoding,
remove_prefix=remove_prefix,
chunksize=chunksize,
)
@LoadAuth
def FetchMetadata(self, fields=None, fetch_all=False):
"""Download file's metadata from id using Files.get().
:param fields: The fields to include, as one string, each entry separated
by commas, e.g. 'fields,labels'.
:type fields: str
:param fetch_all: Whether to fetch all fields.
:type fetch_all: bool
:raises: ApiRequestError, FileNotUploadedError
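Example (sketch; fetches only selected fields for a known file id)::

    f = drive.CreateFile({"id": file_id})
    f.FetchMetadata(fields="title,mimeType")
    print(f["title"], f["mimeType"])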
"""
file_id = self.metadata.get("id") or self.get("id")
if fetch_all:
fields = "*"
if file_id:
try:
request = self.auth.service.files().get(
fileId=file_id,
fields=fields,
# Teamdrive support
supportsAllDrives=True,
)
request = self._AddResourceKeyHeaders(request)
metadata = request.execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.UpdateMetadata(metadata)
else:
raise FileNotUploadedError()
@LoadMetadata
def FetchContent(self, mimetype=None, remove_bom=False):
"""Download file's content from download_url.
:raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
"""
download_url = self.metadata.get("downloadUrl")
export_links = self.metadata.get("exportLinks")
if download_url:
self.content = io.BytesIO(self._DownloadFromUrl(download_url))
self.dirty["content"] = False
elif export_links and export_links.get(mimetype):
self.content = io.BytesIO(
self._DownloadFromUrl(export_links.get(mimetype))
)
self.dirty["content"] = False
else:
raise FileNotDownloadableError(
"No downloadLink/exportLinks for mimetype found in metadata"
)
if mimetype == "text/plain" and remove_bom:
self._RemovePrefix(
self.content, MIME_TYPE_TO_BOM[self["mimeType"]][mimetype]
)
self.has_bom = not remove_bom
def Upload(self, param=None):
"""Upload/update file by choosing the most efficient method.
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError
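Example (sketch; renaming an existing file is a metadata-only change and
goes through Files.patch(), while content changes go through
Files.update())::

    f = drive.CreateFile({"id": file_id})
    f["title"] = "renamed.txt"
    f.Upload()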
"""
if self.uploaded or self.get("id") is not None:
if self.dirty["content"]:
self._FilesUpdate(param=param)
else:
self._FilesPatch(param=param)
else:
self._FilesInsert(param=param)
def Trash(self, param=None):
"""Move a file to the trash.
:raises: ApiRequestError
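Example (sketch; ``f`` is an uploaded GoogleDriveFile)::

    f.Trash()    # move to the trash
    f.UnTrash()  # restore it
    f.Delete()   # permanently delete it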
"""
self._FilesTrash(param=param)
def UnTrash(self, param=None):
"""Move a file out of the trash.
:param param: Additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
self._FilesUnTrash(param=param)
def Delete(self, param=None):
"""Hard-delete a file.
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
self._FilesDelete(param=param)
@LoadAuth
def Copy(self, target_folder=None, new_title=None, param=None):
"""Creates a copy of this file. Folders cannot be copied.
:param target_folder: Folder where the file will be copied.
:type target_folder: GoogleDriveFile, optional
:param new_title: Name of the new file.
:type new_title: str, optional
:param param: additional parameters to pass.
:type param: dict, optional
:raises: ApiRequestError
:return: the copied file
:rtype: GoogleDriveFile
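Example (sketch; ``folder`` is a hypothetical GoogleDriveFile pointing at
a folder)::

    copied = f.Copy(target_folder=folder, new_title="report-copy.pdf")
    print(copied["id"])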
"""
if param is None:
param = {}
param["fileId"] = self["id"]
param["supportsAllDrives"] = True
param["body"] = {}
if target_folder:
param["body"]["parents"] = [{"id": target_folder["id"]}]
param["body"]["title"] = new_title
new_file = None
try:
new_file = (
self.auth.service.files().copy(**param).execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
return GoogleDriveFile(self.auth, new_file)
def InsertPermission(self, new_permission, param=None):
"""Insert a new permission. Re-fetches all permissions after call.
:param new_permission: The new permission to insert, please see the
official Google Drive API guide on permissions.insert
for details.
:type new_permission: object
:param param: additional parameters to pass
:type param: dict
:return: The permission object.
:rtype: object
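Example (sketch; shares the file with anyone who has the link, using the
Drive API v2 permission fields ``type``/``value``/``role``)::

    f.InsertPermission(
        {"type": "anyone", "value": "anyone", "role": "reader"}
    )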
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
param["body"] = new_permission
# Teamdrive support
param["supportsAllDrives"] = True
try:
permission = (
self.auth.service.permissions()
.insert(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.GetPermissions() # Update permissions field.
return permission
@LoadAuth
def GetPermissions(self):
"""Get file's or shared drive's permissions.
For files in a shared drive, at most 100 results will be returned.
It doesn't paginate and collect all results.
:return: A list of the permission objects.
:rtype: object[]
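Example (sketch)::

    for permission in f.GetPermissions() or []:
        print(permission.get("role"), permission.get("emailAddress"))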
"""
file_id = self.metadata.get("id") or self.get("id")
# We can't do a FetchMetadata call here (which would nicely update
# the local metadata cache, etc.) since it doesn't return
# permissions for the team drive use case.
permissions = (
self.auth.service.permissions()
.list(
fileId=file_id,
# Teamdrive support
supportsAllDrives=True,
)
.execute(http=self.http)
).get("items")
if permissions:
self["permissions"] = permissions
self.metadata["permissions"] = permissions
return permissions
def DeletePermission(self, permission_id):
"""Deletes the permission specified by the permission_id.
:param permission_id: The permission id.
:type permission_id: str
:return: True if it succeeds.
:rtype: bool
"""
return self._DeletePermission(permission_id)
@LoadAuth
def GetRevisions(self):
"""Get file's or shared drive's revisions.
For files in a shared drive, at most 100 results will be returned.
It doesn't paginate and collect all results.
:return: A list of the revision objects.
:rtype: object[]
"""
file_id = self.metadata.get("id") or self.get("id")
# We can't do a FetchMetadata call here (which would nicely update
# the local metadata cache, etc.) since it doesn't return
# revisions for the team drive use case.
revisions = (
self.auth.service.revisions()
.list(
fileId=file_id,
)
.execute(http=self.http)
).get("items")
if revisions:
self["revisions"] = revisions
self.metadata["revisions"] = revisions
return revisions
def _WrapRequest(self, request):
"""Replaces request.http with self.http.
Ensures thread safety. Similar to other places where we call
`.execute(http=self.http)` to pass a client from the thread local storage.
"""
if self.http:
request.http = self.http
request = self._AddResourceKeyHeaders(request)
return request
def _AddResourceKeyHeaders(self, request):
"""Add resourceKey headers to request if file is secured with resourceKey and
its available (from a list for example).
:param request: request to add headers to.
:type request: googleapiclient.http.HttpRequest
"""
file_id = self.metadata.get("id") or self.get("id")
if file_id:
resourceKey = self.get("resourceKey")
if resourceKey:
request.headers[
"X-Goog-Drive-Resource-Keys"
] = f"{file_id}/{resourceKey}"
return request
@LoadAuth
def _FilesInsert(self, param=None):
"""Upload a new file using Files.insert().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
# teamdrive support
param["supportsAllDrives"] = True
try:
if self.dirty["content"]:
param["media_body"] = self._BuildMediaBody()
metadata = (
self.auth.service.files()
.insert(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.dirty["content"] = False
self.UpdateMetadata(metadata)
@LoadAuth
def _FilesUnTrash(self, param=None):
"""Un-delete (Trash) a file using Files.UnTrash().
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().untrash(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if self.metadata:
self.metadata["labels"]["trashed"] = False
return True
@LoadAuth
def _FilesTrash(self, param=None):
"""Soft-delete (Trash) a file using Files.Trash().
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().trash(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if self.metadata:
self.metadata["labels"]["trashed"] = True
return True
@LoadAuth
def _FilesDelete(self, param=None):
"""Delete a file using Files.Delete()
(WARNING: deleting permanently deletes the file!)
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().delete(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
return True
@LoadAuth
def _FilesUpdate(self, param=None):
"""Update metadata and/or content using Files.Update().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError, FileNotUploadedError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
param["fileId"] = self.metadata.get("id") or self.get("id")
# Teamdrive support
param["supportsAllDrives"] = True
try:
if self.dirty["content"]:
param["media_body"] = self._BuildMediaBody()
metadata = (
self.auth.service.files()
.update(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.dirty["content"] = False
self.UpdateMetadata(metadata)
@LoadAuth
def _FilesPatch(self, param=None):
"""Update metadata using Files.Patch().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError, FileNotUploadedError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
param["fileId"] = self.metadata.get("id") or self.get("id")
# Teamdrive support
param["supportsAllDrives"] = True
try:
metadata = (
self.auth.service.files()
.patch(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.UpdateMetadata(metadata)
def _BuildMediaBody(self):
"""Build MediaIoBaseUpload to get prepared to upload content of the file.
Sets mimeType as 'application/octet-stream' if not specified.
:returns: MediaIoBaseUpload -- instance that will be used to upload content.
"""
if self.get("mimeType") is None:
self["mimeType"] = "application/octet-stream"
return MediaIoBaseUpload(
self.content, self["mimeType"], resumable=True
)
@LoadAuth
def _DownloadFromUrl(self, url):
"""Download file from url using provided credential.
:param url: link of the file to download.
:type url: str.
:returns: str -- content of downloaded file in string.
:raises: ApiRequestError
"""
resp, content = self.http.request(url)
if resp.status != 200:
raise ApiRequestError(errors.HttpError(resp, content, uri=url))
return content
@LoadAuth
def _DeletePermission(self, permission_id):
"""Deletes the permission remotely, and from the file object itself.
:param permission_id: The ID of the permission.
:type permission_id: str
:return: True if it succeeds.
:rtype: bool
"""
file_id = self.metadata.get("id") or self["id"]
try:
self.auth.service.permissions().delete(
fileId=file_id, permissionId=permission_id
).execute()
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if "permissions" in self and "permissions" in self.metadata:
permissions = self["permissions"]
# Keep only the permissions other than the one just deleted.
is_not_current_permission = (
lambda per: per["id"] != permission_id
)
permissions = list(
filter(is_not_current_permission, permissions)
)
self["permissions"] = permissions
self.metadata["permissions"] = permissions
return True
@staticmethod
def _GetBOM(mimetype):
"""Based on download mime type (ignores Google Drive mime type)"""
for bom in MIME_TYPE_TO_BOM.values():
if mimetype in bom:
return bom[mimetype]
@staticmethod
def _RemovePrefix(file_object, prefix, block_size=BLOCK_SIZE):
"""Deletes passed prefix by shifting content of passed file object by to
the left. Operation is in-place.
Args:
file_object (obj): The file object to manipulate.
prefix (str): The prefix to insert.
block_size (int): The size of the blocks which are moved one at a time.
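Example (illustrative only; strips a UTF-8 BOM from an in-memory buffer)::

    buf = io.BytesIO("\ufeff".encode() + b"hello")
    GoogleDriveFile._RemovePrefix(buf, "\ufeff".encode())
    assert buf.getvalue() == b"hello"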
"""
prefix_length = len(prefix)
# Detect if prefix exists in file.
content_start = file_object.read(prefix_length)
if content_start == prefix:
# Shift content left by prefix length, by copying 1KiB at a time.
block_to_write = file_object.read(block_size)
current_block_length = len(block_to_write)
# Read and write location in separate variables for simplicity.
read_location = prefix_length + current_block_length
write_location = 0
while current_block_length > 0:
# Write next block.
file_object.seek(write_location)
file_object.write(block_to_write)
# Set write location to the next block.
write_location += len(block_to_write)
# Read next block of input.
file_object.seek(read_location)
block_to_write = file_object.read(block_size)
# Update the current block length and read_location.
current_block_length = len(block_to_write)
read_location += current_block_length
# Truncate the file to its, now shorter, length.
file_object.truncate(read_location - prefix_length)
@staticmethod
def _InsertPrefix(file_object, prefix, block_size=BLOCK_SIZE):
"""Inserts the passed prefix in the beginning of the file, operation is
in-place.
Args:
file_object (obj): The file object to manipulate.
prefix (str): The prefix to insert.
"""
# Read the first two blocks.
first_block = file_object.read(block_size)
second_block = file_object.read(block_size)
# Pointer to the first byte of the next block to be read.
read_location = block_size * 2
# Write BOM.
file_object.seek(0)
file_object.write(prefix)
# {read|write}_location separated for readability.
write_location = len(prefix)
# Write and read block alternatingly.
while len(first_block):
# Write first block.
file_object.seek(write_location)
file_object.write(first_block)
# Increment write_location.
write_location += block_size
# Move second block into first variable.
first_block = second_block