Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
mikf
GitHub Repository: mikf/gallery-dl
Path: blob/master/gallery_dl/job.py
8932 views
1
# -*- coding: utf-8 -*-
2
3
# Copyright 2015-2026 Mike Fährmann
4
#
5
# This program is free software; you can redistribute it and/or modify
6
# it under the terms of the GNU General Public License version 2 as
7
# published by the Free Software Foundation.
8
9
import sys
10
import errno
11
import logging
12
import functools
13
import collections
14
15
from . import (
16
extractor,
17
downloader,
18
postprocessor,
19
archive,
20
config,
21
exception,
22
formatter,
23
output,
24
path,
25
text,
26
util,
27
version,
28
)
29
from .extractor.message import Message
30
stdout_write = output.stdout_write
31
FLAGS = util.FLAGS
32
33
34
class Job():
35
"""Base class for Job types"""
36
ulog = None
37
_logger_adapter = output.LoggerAdapter
38
39
    def __init__(self, extr, parent=None):
        """Resolve 'extr' and set up logging, config path, and metadata.

        'extr' may be an Extractor instance or a URL string; 'parent' is
        the Job that spawned this one (for child extractors), or None.
        Raises NoExtractorError when no extractor matches a URL string.
        """
        if isinstance(extr, str):
            extr = extractor.find(extr)
        if not extr:
            raise exception.NoExtractorError()

        self.extractor = extr
        self.pathfmt = None
        self.status = 0
        self.kwdict = {}
        self.kwdict_eval = False

        # install this job's config lookup path on the extractor;
        # a list result switches the extractor to shared-config lookups
        if cfgpath := self._build_config_path(parent):
            if isinstance(cfgpath, list):
                extr.config = extr._config_shared
                extr.config_accumulate = extr._config_shared_accumulate
            extr._cfgpath = cfgpath

        # optional user-defined logging actions (imported lazily)
        if actions := extr.config("actions"):
            from .actions import LoggerAdapter, parse_logging
            self._logger_adapter = LoggerAdapter
            self._logger_actions = parse_logging(actions)

        # extra context made available to log formatters
        path_proxy = output.PathfmtProxy(self)
        self._logger_extra = {
            "job" : self,
            "extractor": extr,
            "path" : path_proxy,
            "keywords" : output.KwdictProxy(self),
        }
        extr.log = self._wrap_logger(extr.log)
        extr.log.debug("Using %s for '%s'", extr.__class__.__name__, extr.url)

        # option lookups; config2() accepts alternate/legacy option names
        self.metadata_url = extr.config2(
            "metadata-url", "url-metadata", "_url")
        self.metadata_http = extr.config2(
            "metadata-http", "http-metadata")
        metadata_path = extr.config2(
            "metadata-path", "path-metadata", "_path")
        metadata_version = extr.config2(
            "metadata-version", "version-metadata")
        metadata_extractor = extr.config2(
            "metadata-extractor", "extractor-metadata", "_extr")

        # expose path / extractor / version info under user-chosen keys
        if metadata_path:
            self.kwdict[metadata_path] = path_proxy
        if metadata_extractor:
            self.kwdict[metadata_extractor] = extr
        if metadata_version:
            self.kwdict[metadata_version] = {
                "version" : version.__version__,
                "is_executable" : util.EXECUTABLE,
                "current_git_head": util.git_head()
            }
        # user-supplied metadata; per-extractor 'keywords' override
        # the global ones on key collisions
        kwdict = extr.config("keywords")
        if kwdict_global := extr.config("keywords-global"):
            kwdict = {**kwdict_global, **kwdict} if kwdict else kwdict_global
        elif not kwdict:
            return

        if extr.config("keywords-eval"):
            # string values are format strings evaluated per file
            # (stored as (key, format_map) pairs in self.kwdict_eval)
            self.kwdict_eval = []
            for key, value in kwdict.items():
                if isinstance(value, str):
                    fmt = formatter.parse(value, None, util.identity)
                    self.kwdict_eval.append((key, fmt.format_map))
                else:
                    self.kwdict[key] = value
        else:
            self.kwdict.update(kwdict)
110
111
    def _build_config_path(self, parent):
        """Build the list of (category, subcategory) config lookup pairs.

        For child jobs, includes 'parent>child' and 'parent>*' entries so
        users can configure extractors differently when spawned by another
        extractor.  Returns the parent's path directly when
        'category-transfer' is enabled.
        """
        extr = self.extractor
        cfgpath = []

        if parent is None:
            self.parents = ()
        else:
            pextr = parent.extractor
            # track the chain of parent categories, avoiding duplicates
            if extr.category == pextr.category or \
                    extr.category in parent.parents:
                parents = parent.parents
            else:
                parents = parent.parents + (pextr.category,)
            self.parents = parents

            # child adopts the parent's category identity entirely
            if pextr.config("category-transfer", pextr.categorytransfer):
                extr.category = pextr.category
                extr.subcategory = pextr.subcategory
                return pextr._cfgpath

            if parents:
                sub = extr.subcategory
                for category in parents:
                    cat = f"{category}>{extr.category}"
                    cfgpath.append((cat, sub))
                    cfgpath.append((category + ">*", sub))
                cfgpath.append((extr.category, sub))

        # base-category entries act as fallbacks after the specific ones
        if extr.basecategory:
            if not cfgpath:
                cfgpath.append((extr.category, extr.subcategory))
            if extr.basesubcategory:
                cfgpath.append((extr.basesubcategory, extr.subcategory))
            cfgpath.append((extr.basecategory, extr.subcategory))

        return cfgpath
147
148
    def run(self):
        """Execute or run the job.

        Dispatches all extractor messages, translating exceptions into
        bit flags on self.status.  Returns the accumulated status code.
        The except-clause order matters: more specific project exceptions
        are handled before GalleryDLException / OSError / Exception.
        """
        extractor = self.extractor
        log = extractor.log

        self._init()

        # sleep before extractor start
        sleep = util.build_duration_func(
            extractor.config("sleep-extractor"))
        if sleep is not None:
            extractor.sleep(sleep(), "extractor")

        try:
            msg = self.dispatch(extractor)
        except exception.StopExtraction as exc:
            # propagate upwards until the targeted extractor level
            # (or depth budget) is reached; otherwise swallow it
            if exc.depth > 1 and exc.target != extractor.__class__.subcategory:
                exc.depth -= 1
                raise
            pass
        except exception.AbortExtraction as exc:
            log.traceback(exc)
            log.error(exc.message)
            self.status |= exc.code
        except (exception.TerminateExtraction, exception.RestartExtraction):
            # handled by the caller (outer job loop)
            raise
        except exception.GalleryDLException as exc:
            log.error("%s: %s", exc.__class__.__name__, exc)
            log.traceback(exc)
            self.status |= exc.code
        except OSError as exc:
            log.traceback(exc)
            # requests' JSONDecodeError subclasses OSError, so a name
            # check distinguishes it from genuine I/O failures
            if (name := exc.__class__.__name__) == "JSONDecodeError":
                log.error("Failed to parse JSON data: %s: %s", name, exc)
                self.status |= 1
            else:  # regular OSError
                log.error("Unable to download data: %s: %s", name, exc)
                self.status |= 128
        except Exception as exc:
            log.error(("An unexpected error occurred: %s - %s. "
                       "Please run gallery-dl again with the --verbose flag, "
                       "copy its output and report this issue on "
                       "https://github.com/mikf/gallery-dl/issues ."),
                      exc.__class__.__name__, exc)
            log.traceback(exc)
            self.status |= 1
        except BaseException:
            # e.g. KeyboardInterrupt: flag failure, then re-raise
            self.status |= 1
            raise
        else:
            # dispatch() returns the last message type seen; None means
            # the extractor yielded nothing at all
            if msg is None:
                log.info("No results for %s", extractor.url)
        finally:
            if extractor.status:
                self.status |= extractor.status
            self.handle_finalize()
            if extractor.finalize is not None:
                extractor.finalize(self.status)

        return self.status
208
209
    def dispatch(self, messages):
        """Call the appropriate message handler.

        Iterates over (msg, url, kwdict) tuples from the extractor and
        routes them to handle_directory / handle_url / handle_queue,
        honoring the global FLAGS controls and the per-post predicates.
        Returns the last message type processed (None if there was none).
        """
        msg = None
        process = True
        metadata_url = self.metadata_url

        # 'follow': a format string extracting URLs from post metadata,
        # queued as child jobs once the post is complete
        if follow := self.extractor.config("follow"):
            follow = formatter.parse(follow, None, util.identity).format_map
            follow_urls = follow_kwdict = None
        else:
            follow = follow_urls = None

        for msg, url, kwdict in messages:

            if msg == Message.Directory:
                # flush 'follow' URLs collected for the previous post
                if follow_urls is not None:
                    for furl in follow_urls:
                        if metadata_url is not None:
                            follow_kwdict[metadata_url] = furl
                        if self.pred_queue(furl, follow_kwdict):
                            self.handle_queue(furl, follow_kwdict)
                    follow_urls = None

                self.update_kwdict(kwdict)
                if self.pred_post(url, kwdict):
                    process = True
                    self.handle_directory(kwdict)
                    if follow is not None:
                        follow_urls = self._collect_urls(follow(kwdict))
                        if follow_urls is not None:
                            follow_kwdict = kwdict.copy()
                else:
                    # post rejected: skip its files until the next one
                    process = None
                if FLAGS.POST is not None:
                    FLAGS.process("POST")

            elif process is None:
                # current post was filtered out; drop its messages
                continue
            elif FLAGS.POST is False:
                FLAGS.POST = process = None
                continue

            elif msg == Message.Url:
                if metadata_url is not None:
                    kwdict[metadata_url] = url
                self.update_kwdict(kwdict)
                if self.pred_url(url, kwdict):
                    if FLAGS.FILE is False:
                        FLAGS.FILE = None
                        continue
                    self.handle_url(url, kwdict)
                    if FLAGS.FILE is not None:
                        FLAGS.process("FILE")

            elif msg == Message.Queue:
                self.update_kwdict(kwdict)
                if metadata_url is not None:
                    kwdict[metadata_url] = url
                if self.pred_queue(url, kwdict):
                    if FLAGS.CHILD is False:
                        FLAGS.CHILD = None
                        continue
                    self.handle_queue(url, kwdict)
                    if FLAGS.CHILD is not None:
                        FLAGS.process("CHILD")

        # flush 'follow' URLs of the final post
        if follow_urls is not None:
            for furl in follow_urls:
                if metadata_url is not None:
                    follow_kwdict[metadata_url] = furl
                if self.pred_queue(furl, follow_kwdict):
                    self.handle_queue(furl, follow_kwdict)

        return msg
283
284
    def handle_url(self, url, kwdict):
        """Handle Message.Url -- no-op in the base class"""

    def handle_directory(self, kwdict):
        """Handle Message.Directory -- no-op in the base class"""

    def handle_queue(self, url, kwdict):
        """Handle Message.Queue -- no-op in the base class"""

    def handle_finalize(self):
        """Handle job finalization -- no-op in the base class"""
295
296
def update_kwdict(self, kwdict):
297
"""Update 'kwdict' with additional metadata"""
298
extr = self.extractor
299
kwdict["category"] = extr.category
300
kwdict["subcategory"] = extr.subcategory
301
if self.metadata_http:
302
kwdict.pop(self.metadata_http, None)
303
if extr.kwdict:
304
kwdict.update(extr.kwdict)
305
if self.kwdict:
306
kwdict.update(self.kwdict)
307
if self.kwdict_eval:
308
for key, valuegen in self.kwdict_eval:
309
kwdict[key] = valuegen(kwdict)
310
311
    def initialize(self):
        # lazy-initialization hook; subclasses override this
        pass
313
314
def _init(self):
315
extr = self.extractor
316
317
extr.initialize()
318
self.pred_url = self._prepare_predicates(
319
"file", "image", extr.skip)
320
self.pred_post = self._prepare_predicates(
321
"post", None, None)
322
self.pred_queue = self._prepare_predicates(
323
"child", "chapter", None)
324
325
init = extr.config("init", False)
326
if init and init != "lazy":
327
self.initialize()
328
329
def _collect_urls(self, source):
330
if not source:
331
return None
332
if isinstance(source, list):
333
return source
334
if isinstance(source, str):
335
if urls := text.extract_urls(source):
336
return urls
337
338
    def _prepare_predicates(self, target, alt=None, skip=None):
        """Combine the '<target>-unique/-filter/-range' options into one
        predicate callable.

        'alt' is a legacy alias option prefix checked as a fallback;
        'skip' is the extractor's skip callback passed to the range
        predicate (disabled when a filter predicate is also active,
        since skipping would desynchronize the file counter).
        """
        predicates = []
        extr = self.extractor

        if extr.config(target + "-unique") or \
                alt is not None and extr.config(alt + "-unique"):
            predicates.append(util.predicate_unique())

        # NOTE: the first walrus always binds 'pfilter', so it is
        # defined (possibly falsy) for the range branch below
        if (pfilter := extr.config(target + "-filter")) or \
                alt is not None and (pfilter := extr.config(alt + "-filter")):
            try:
                predicates.append(util.predicate_filter(pfilter, target))
            except (SyntaxError, ValueError, TypeError) as exc:
                extr.log.warning(exc)

        if (prange := extr.config(target + "-range")) or \
                alt is not None and (prange := extr.config(alt + "-range")):
            try:
                if pfilter:
                    skip = None
                flag = target if alt is not None else None
                predicates.append(util.predicate_range(prange, skip, flag))
            except ValueError as exc:
                extr.log.warning("invalid %s range: %s", target, exc)

        return util.predicate_build(predicates)
364
365
def get_logger(self, name):
366
return self._wrap_logger(logging.getLogger(name))
367
368
def _wrap_logger(self, logger):
369
return self._logger_adapter(logger, self)
370
371
def _write_unsupported(self, url):
372
if self.ulog is not None:
373
self.ulog.info(url)
374
375
376
class DownloadJob(Job):
377
"""Download images into appropriate directory/filename locations"""
378
379
def __init__(self, url, parent=None):
380
Job.__init__(self, url, parent)
381
self.log = self.get_logger("download")
382
self.fallback = None
383
self.archive = None
384
self.sleep = None
385
self.hooks = ()
386
self.downloaders = {}
387
self.out = output.select()
388
self.visited = set() if parent is None else parent.visited
389
self._extractor_filter = None
390
self._skipcnt = 0
391
392
    def handle_url(self, url, kwdict):
        """Download the resource specified in 'url'.

        Runs the full per-file pipeline: filename preparation, archive
        and filesystem skip checks, hook callbacks, the actual download
        (with fallback URLs), post-processing, and archive recording.
        """
        hooks = self.hooks
        pathfmt = self.pathfmt
        archive = self.archive

        # prepare download
        pathfmt.set_filename(kwdict)

        if "prepare" in hooks:
            for callback in hooks["prepare"]:
                callback(pathfmt)

        # already recorded in the download archive -> skip
        if archive is not None and archive.check(kwdict):
            pathfmt.fix_extension()
            self.handle_skip()
            return

        # a known extension allows an early file-exists check
        # (with http-metadata enabled, headers may change the path later)
        if pathfmt.extension and not self.metadata_http:
            pathfmt.build_path()

            if pathfmt.exists():
                if archive is not None and self._archive_write_skip:
                    archive.add(kwdict)
                self.handle_skip()
                return

        if "prepare-after" in hooks:
            for callback in hooks["prepare-after"]:
                callback(pathfmt)

            # a hook may change the path; re-check existence on request
            if kwdict.pop("_file_recheck", False) and pathfmt.exists():
                if archive is not None and self._archive_write_skip:
                    archive.add(kwdict)
                self.handle_skip()
                return

        if self.sleep is not None:
            self.extractor.sleep(self.sleep(), "download")

        # download from URL
        if not self.download(url):

            # use fallback URLs if available/enabled
            fallback = kwdict.get("_fallback", ()) if self.fallback else ()
            for num, url in enumerate(fallback, 1):
                util.remove_file(pathfmt.temppath)
                self.log.info("Trying fallback URL #%d", num)
                if self.download(url):
                    break
            else:
                # download failed
                self.status |= 4
                self.log.error("Failed to download %s",
                               pathfmt.filename or url)
                if "error" in hooks:
                    for callback in hooks["error"]:
                        callback(pathfmt)
                return

        # no temporary file means the downloader skipped the file
        if not pathfmt.temppath:
            if archive is not None and self._archive_write_skip:
                archive.add(kwdict)
            self.handle_skip()
            return

        # run post processors
        if "file" in hooks:
            for callback in hooks["file"]:
                callback(pathfmt)

        # download succeeded
        pathfmt.finalize()
        self.out.success(pathfmt.path)
        self._skipcnt = 0
        if archive is not None and self._archive_write_file:
            archive.add(kwdict)
        if "after" in hooks:
            for callback in hooks["after"]:
                callback(pathfmt)
        if archive is not None and self._archive_write_after:
            archive.add(kwdict)
474
475
    def handle_directory(self, kwdict):
        """Set and create the target directory for downloads.

        The first directory message triggers full initialization;
        subsequent ones fire 'post-after' hooks for the previous post
        before switching directories, then 'post' hooks for the new one.
        """
        if self.pathfmt is None:
            self.initialize(kwdict)
        else:
            if "post-after" in self.hooks:
                for callback in self.hooks["post-after"]:
                    callback(self.pathfmt)
                if FLAGS.POST is not None:
                    FLAGS.process("POST")
            self.pathfmt.set_directory(kwdict)
        if "post" in self.hooks:
            for callback in self.hooks["post"]:
                callback(self.pathfmt)
489
490
    def handle_queue(self, url, kwdict):
        """Spawn and run a child job for a queued URL.

        Applies the whitelist/blacklist extractor filter, transfers
        parent options/metadata/session to the child, and retries via
        '_fallback' URLs when the child job fails.
        """
        # avoid processing the same queued URL twice
        if url in self.visited:
            return
        self.visited.add(url)

        if "child" in self.hooks:
            pathfmt = self.pathfmt
            pathfmt.kwdict = kwdict
            for callback in self.hooks["child"]:
                callback(pathfmt)

        # the extractor may already be determined by the parent;
        # otherwise find one and run it through the category filter
        if cls := kwdict.get("_extractor"):
            extr = cls.from_url(url)
        else:
            if extr := extractor.find(url):
                if self._extractor_filter is None:
                    self._extractor_filter = self._build_extractor_filter()
                if not self._extractor_filter(extr):
                    extr = None

        if extr:
            job = self.__class__(extr, self)
            pfmt = self.pathfmt
            pextr = self.extractor
            parent = pextr.config("parent", pextr.parent)

            # let the child inherit the parent's target directory
            if pfmt and pextr.config("parent-directory", parent):
                extr._parentdir = pfmt.directory
            else:
                extr._parentdir = pextr._parentdir

            # forward parent metadata, either under one key (str) or
            # merged directly into the child's kwdict
            if pmeta := pextr.config2(
                    "parent-metadata", "metadata-parent", parent or "_parent"):
                if isinstance(pmeta, str):
                    data = self.kwdict.copy()
                    if kwdict:
                        data.update(kwdict)
                    job.kwdict[pmeta] = data
                else:
                    if self.kwdict:
                        job.kwdict.update(self.kwdict)
                    if kwdict:
                        job.kwdict.update(kwdict)
                    if "_extractor" in kwdict:
                        del job.kwdict["_extractor"]

            if pextr.config("parent-session", parent):
                extr.session = pextr.session

            # loop to support RestartExtraction from the child job
            while True:
                try:
                    if pextr.config("parent-skip", parent):
                        # share the consecutive-skip counter with the child
                        job._skipcnt = self._skipcnt
                        status = job.run()
                        self._skipcnt = job._skipcnt
                    else:
                        status = job.run()

                    if status:
                        self.status |= status
                        if (status & 95 and  # not FormatError or OSError
                                "_fallback" in kwdict and self.fallback):
                            fallback = kwdict["_fallback"] = \
                                iter(kwdict["_fallback"])
                            try:
                                url = next(fallback)
                            except StopIteration:
                                pass
                            else:
                                pextr.log.info("Downloading fallback URL")
                                text.nameext_from_url(url, kwdict)
                                # HLS/DASH manifests carry no useful name;
                                # derive one from the URL path instead
                                if kwdict["filename"].startswith((
                                        "HLS", "DASH")):
                                    kwdict["filename"] = \
                                        url.rsplit("/", 2)[-2]
                                if url.startswith("ytdl:"):
                                    kwdict["extension"] = "mp4"
                                self.handle_url(url, kwdict)
                    break
                except exception.RestartExtraction:
                    pass

        else:
            self._write_unsupported(url)

        if "child-after" in self.hooks:
            pathfmt = self.pathfmt
            pathfmt.kwdict = kwdict
            for callback in self.hooks["child-after"]:
                callback(pathfmt)
579
580
    def handle_finalize(self):
        """Close the archive, store cookies, and run finalize hooks."""
        if self.archive:
            # only commit/finalize the archive after a clean run
            if not self.status:
                self.archive.finalize()
            self.archive.close()

        # hooks need an initialized PathFormat to operate on
        if pathfmt := self.pathfmt:
            hooks = self.hooks
            if "post-after" in hooks:
                for callback in hooks["post-after"]:
                    callback(pathfmt)

            self.extractor.cookies_store()

            if self.status:
                if "finalize-error" in hooks:
                    for callback in hooks["finalize-error"]:
                        callback(pathfmt)
            else:
                if "finalize-success" in hooks:
                    for callback in hooks["finalize-success"]:
                        callback(pathfmt)
            if "finalize" in hooks:
                for callback in hooks["finalize"]:
                    callback(pathfmt)
605
606
    def handle_skip(self):
        """Report a skipped file and enforce the 'skip' abort policy."""
        pathfmt = self.pathfmt
        if "skip" in self.hooks:
            for callback in self.hooks["skip"]:
                callback(pathfmt)
        self.out.skip(pathfmt.path)

        # 'skip = "abort:N"' etc.: raise after N consecutive skips
        # (optionally only counting files matching 'skip-filter')
        if self._skipexc is not None:
            if self._skipftr is None or self._skipftr(pathfmt.kwdict):
                self._skipcnt += 1
                if self._skipcnt >= self._skipmax:
                    raise self._skipexc

        if self.sleep_skip is not None:
            self.extractor.sleep(self.sleep_skip(), "skip")
621
622
def download(self, url):
623
"""Download 'url'"""
624
if downloader := self.get_downloader(url[:url.find(":")]):
625
try:
626
return downloader.download(url, self.pathfmt)
627
except OSError as exc:
628
if exc.errno == errno.ENOSPC:
629
raise
630
self.log.warning("%s: %s", exc.__class__.__name__, exc)
631
return False
632
self._write_unsupported(url)
633
return False
634
635
def get_downloader(self, scheme):
636
"""Return a downloader suitable for 'scheme'"""
637
try:
638
return self.downloaders[scheme]
639
except KeyError:
640
pass
641
642
cls = downloader.find(scheme)
643
if cls and config.get(("downloader", cls.scheme), "enabled", True):
644
instance = cls(self)
645
else:
646
instance = None
647
self.log.error("'%s:' URLs are not supported/enabled", scheme)
648
649
if cls and cls.scheme == "http":
650
self.downloaders["http"] = self.downloaders["https"] = instance
651
else:
652
self.downloaders[scheme] = instance
653
return instance
654
655
    def initialize(self, kwdict=None):
        """initialize PathFormat, postprocessors, archive, options, etc"""
        extr = self.extractor
        cfg = extr.config

        pathfmt = self.pathfmt = path.PathFormat(extr)
        if kwdict is not None:
            pathfmt.set_directory(kwdict)

        self.sleep = util.build_duration_func(cfg("sleep"))
        self.sleep_skip = util.build_duration_func(cfg("sleep-skip"))
        self.fallback = cfg("fallback", True)
        if not cfg("download", True):
            # monkey-patch method to do nothing and always return True
            self.download = pathfmt.fix_extension

        # -- download archive ------------------------------------------
        if archive_path := cfg("archive"):
            archive_table = cfg("archive-table")
            archive_prefix = cfg("archive-prefix")
            if archive_prefix is None:
                # a custom table makes the category prefix redundant
                archive_prefix = extr.category if archive_table is None else ""

            archive_format = cfg("archive-format")
            if archive_format is None:
                archive_format = extr.archive_fmt

            try:
                self.archive = archive.connect(
                    archive_path,
                    archive_prefix,
                    archive_format,
                    archive_table,
                    cfg("archive-mode"),
                    cfg("archive-pragma"),
                    kwdict,
                )
            except Exception as exc:
                # archive failures are non-fatal; continue without one
                extr.log.warning(
                    "Failed to open download archive at '%s' (%s: %s)",
                    archive_path, exc.__class__.__name__, exc)
            else:
                extr.log.debug("Using download archive '%s'", archive_path)

            # which events write an archive entry (default: 'file' only)
            events = cfg("archive-event")
            if events is None:
                self._archive_write_file = True
                self._archive_write_skip = False
                self._archive_write_after = False
            else:
                if isinstance(events, str):
                    events = events.split(",")
                self._archive_write_file = ("file" in events)
                self._archive_write_skip = ("skip" in events)
                self._archive_write_after = ("after" in events)

        # -- skip behavior ---------------------------------------------
        if skip := cfg("skip", True):
            self._skipexc = None
            if skip == "enumerate":
                pathfmt.check_file = pathfmt._enum_file
            elif isinstance(skip, str):
                # "abort[:N[:TARGET]]" / "terminate[:N]" / "exit[:N]"
                skip, _, smax = skip.partition(":")
                if skip == "abort":
                    smax, _, sarg = smax.partition(":")
                    self._skipexc = exception.StopExtraction(sarg or None)
                elif skip == "terminate":
                    self._skipexc = exception.TerminateExtraction
                elif skip == "exit":
                    self._skipexc = SystemExit
                self._skipmax = text.parse_int(smax)

            if skip_filter := cfg("skip-filter"):
                self._skipftr = util.compile_filter(skip_filter)
            else:
                self._skipftr = None
        else:
            # monkey-patch methods to always return False
            pathfmt.exists = lambda x=None: False
            if self.archive is not None:
                self.archive.check = pathfmt.exists

        if not cfg("postprocess", True):
            return

        # -- postprocessors --------------------------------------------
        if postprocessors := extr.config_accumulate("postprocessors"):
            self.hooks = collections.defaultdict(list)

            pp_log = self.get_logger("postprocessor")
            pp_conf = config.get((), "postprocessor") or {}
            pp_opts = cfg("postprocessor-options")
            pp_list = []

            for pp_dict in postprocessors:
                # resolve string entries / 'type' templates from
                # the global 'postprocessor' config section
                if isinstance(pp_dict, str):
                    pp_dict = pp_conf.get(pp_dict) or {"name": pp_dict}
                elif "type" in pp_dict:
                    pp_type = pp_dict["type"]
                    if pp_type in pp_conf:
                        pp = pp_conf[pp_type].copy()
                        pp.update(pp_dict)
                        pp_dict = pp
                    if "name" not in pp_dict:
                        pp_dict["name"] = pp_type
                if pp_opts:
                    pp_dict = pp_dict.copy()
                    pp_dict.update(pp_opts)

                # per-postprocessor category whitelist/blacklist
                clist = pp_dict.get("whitelist")
                if clist is not None:
                    negate = False
                else:
                    clist = pp_dict.get("blacklist")
                    negate = True
                if clist and not util.build_extractor_filter(
                        clist, negate)(extr):
                    continue

                name = pp_dict.get("name", "")
                if "__init__" not in pp_dict:
                    # parse "name@event" and "name/mode" shorthand
                    name, sep, event = name.rpartition("@")
                    if sep:
                        pp_dict["name"] = name
                        if "event" not in pp_dict:
                            pp_dict["event"] = event
                    else:
                        name = event

                    name, sep, mode = name.rpartition("/")
                    if sep:
                        pp_dict["name"] = name
                        if "mode" not in pp_dict:
                            pp_dict["mode"] = mode
                    else:
                        name = mode

                    # mark this entry as fully parsed
                    pp_dict["__init__"] = None

                pp_cls = postprocessor.find(name)
                if pp_cls is None:
                    pp_log.warning("module '%s' not found", name)
                    continue
                try:
                    pp_obj = pp_cls(self, pp_dict)
                except Exception as exc:
                    pp_log.traceback(exc)
                    pp_log.error("'%s' initialization failed: %s: %s",
                                 name, exc.__class__.__name__, exc)
                else:
                    pp_list.append(pp_obj)

            if pp_list:
                extr.log.debug("Active postprocessor modules: %s", pp_list)
                if "init" in self.hooks:
                    for callback in self.hooks["init"]:
                        callback(pathfmt)
809
810
def register_hooks(self, hooks, options=None):
811
expr = options.get("filter") if options else None
812
813
if expr:
814
condition = util.compile_filter(expr)
815
for hook, callback in hooks.items():
816
self.hooks[hook].append(functools.partial(
817
_call_hook_condition, callback, condition))
818
else:
819
for hook, callback in hooks.items():
820
self.hooks[hook].append(callback)
821
822
def _build_extractor_filter(self):
823
clist = self.extractor.config("whitelist")
824
if clist is not None:
825
negate = False
826
special = None
827
else:
828
clist = self.extractor.config("blacklist")
829
negate = True
830
special = util.SPECIAL_EXTRACTORS
831
if clist is None:
832
clist = (self.extractor.category,)
833
834
return util.build_extractor_filter(clist, negate, special)
835
836
837
def _call_hook_condition(callback, condition, pathfmt):
838
if condition(pathfmt.kwdict):
839
callback(pathfmt)
840
841
842
class SimulationJob(DownloadJob):
    """Simulate the extraction process without downloading anything"""

    def handle_url(self, url, kwdict):
        """Resolve the would-be filename and report it as skipped."""
        extension = kwdict["extension"] or "jpg"
        kwdict["extension"] = self.pathfmt.extension_map(extension, extension)

        if self.sleep is not None:
            self.extractor.sleep(self.sleep(), "download")
        if self.archive is not None and self._archive_write_skip:
            self.archive.add(kwdict)

        filename = self.pathfmt.build_filename(kwdict)
        self.out.skip(filename)

    def handle_directory(self, kwdict):
        """Lazily set up the PathFormat on the first directory message."""
        if self.pathfmt is None:
            self.initialize()
858
859
class KeywordJob(Job):
    """Print available keywords"""

    def __init__(self, url, parent=None):
        Job.__init__(self, url, parent)
        # show keys starting with '_' only when 'output.private' is set
        self.private = config.get(("output",), "private")

    def handle_url(self, url, kwdict):
        """Print file-level keywords for the first URL, then stop."""
        stdout_write("\nKeywords for filenames and --filter:\n"
                     "------------------------------------\n")

        # include response headers when http-metadata is enabled
        if self.metadata_http and url.startswith("http"):
            kwdict[self.metadata_http] = util.extract_headers(
                self.extractor.request(url, method="HEAD"))

        self.print_kwdict(kwdict)
        raise exception.StopExtraction()

    def handle_directory(self, kwdict):
        """Print directory-level keywords."""
        stdout_write("Keywords for directory names:\n"
                     "-----------------------------\n")
        self.print_kwdict(kwdict)

    def handle_queue(self, url, kwdict):
        """Print queue-level keywords, recursing into the child when
        possible, then stop."""
        extr = None
        if "_extractor" in kwdict:
            extr = kwdict["_extractor"].from_url(url)

        if not util.filter_dict(kwdict):
            self.extractor.log.info(
                "This extractor only spawns other extractors "
                "and does not provide any metadata on its own.")

            if extr:
                self.extractor.log.info(
                    "Showing results for '%s' instead:\n", url)
                KeywordJob(extr, self).run()
            else:
                self.extractor.log.info(
                    "Try 'gallery-dl -K \"%s\"' instead.", url)
        else:
            stdout_write("Keywords for --chapter-filter:\n"
                         "------------------------------\n")
            self.print_kwdict(kwdict)
            if extr or self.extractor.categorytransfer:
                stdout_write("\n")
                KeywordJob(extr or url, self).run()
        raise exception.StopExtraction()

    def print_kwdict(self, kwdict, prefix="", markers=None):
        """Print key-value pairs in 'kwdict' with formatting"""
        write = sys.stdout.write
        suffix = "']" if prefix else ""

        # track visited dict ids to survive circular references
        markerid = id(kwdict)
        if markers is None:
            markers = {markerid}
        elif markerid in markers:
            write(f"{prefix[:-2]}\n <circular reference>\n")
            return  # ignore circular reference
        else:
            markers.add(markerid)

        for key, value in sorted(kwdict.items()):
            if key[0] == "_" and not self.private:
                continue
            key = prefix + key + suffix

            if isinstance(value, dict):
                # recurse with a "parent['" prefix
                self.print_kwdict(value, key + "['", markers)

            elif isinstance(value, list):
                if not value:
                    pass
                elif isinstance(value[0], dict):
                    # show only the first element as a representative
                    self.print_kwdict(value[0], key + "[N]['", markers)
                else:
                    # right-align indices to the width of the last one
                    fmt = (" {:>%s} {}\n" % len(str(len(value)))).format
                    write(key + "[N]\n")
                    for idx, val in enumerate(value, 0):
                        write(fmt(idx, val))

            else:
                # string or number
                write(f"{key}\n {value}\n")

        markers.remove(markerid)
946
947
948
class UrlJob(Job):
    """Print download urls"""
    # recursion limit; at this depth, queued URLs are printed, not followed
    maxdepth = 1

    def __init__(self, url, parent=None, depth=1):
        Job.__init__(self, url, parent)
        self.depth = depth
        if depth >= self.maxdepth:
            # stop recursing: print queued URLs instead of following them
            self.handle_queue = self.handle_url

    def handle_url(self, url, _):
        stdout_write(f"{url}\n")

    def handle_url_fallback(self, url, kwdict):
        """Print 'url' plus any of its '_fallback' alternatives."""
        stdout_write(f"{url}\n")
        if "_fallback" in kwdict:
            for fallback in kwdict["_fallback"]:
                stdout_write(f"| {fallback}\n")

    def handle_queue(self, url, kwdict):
        """Recurse into the queued URL with an incremented depth."""
        cls = kwdict.get("_extractor")
        extr = cls.from_url(url) if cls else extractor.find(url)

        if not extr:
            self._write_unsupported(url)
            return
        child = self.__class__(extr, self, self.depth + 1)
        self.status |= child.run()
977
978
979
class InfoJob(Job):
    """Print extractor defaults and settings"""

    def run(self):
        """Print category and format information; always returns 0."""
        extr = self.extractor
        write_multi = self._print_multi
        write_config = self._print_config

        if extr.basecategory:
            write_multi("Category / Subcategory / Basecategory",
                        extr.category, extr.subcategory, extr.basecategory)
        else:
            write_multi("Category / Subcategory",
                        extr.category, extr.subcategory)

        write_config("Filename format", "filename", extr.filename_fmt)
        write_config("Directory format", "directory", extr.directory_fmt)
        write_config("Archive format", "archive-format", extr.archive_fmt)
        write_config("Request interval", "sleep-request",
                     extr.request_interval)

        return 0

    def _print_multi(self, title, *values):
        """Print 'values' as one '/'-separated, JSON-encoded line."""
        joined = " / ".join(map(util.json_dumps, values))
        stdout_write(f"{title}\n {joined}\n\n")

    def _print_config(self, title, optname, value):
        """Print an option's default and, if set, its configured value."""
        optval = self.extractor.config(optname, util.SENTINEL)
        if optval is not util.SENTINEL:
            stdout_write(
                f"{title} (custom):\n {util.json_dumps(optval)}\n"
                f"{title} (default):\n {util.json_dumps(value)}\n\n")
        elif value:
            stdout_write(
                f"{title} (default):\n {util.json_dumps(value)}\n\n")
1013
1014
1015
class DataJob(Job):
    """Collect extractor results and dump them"""
    # recursion budget for resolving queue messages (0/False = off)
    resolve = False

    def __init__(self, url, parent=None, file=sys.stdout, ensure_ascii=True,
                 resolve=False):
        Job.__init__(self, url, parent)
        self.file = file
        self.data = []
        self.data_urls = []
        self.data_post = []
        self.data_meta = []
        self.exception = None
        self.ascii = config.get(("output",), "ascii", ensure_ascii)
        self.jsonl = config.get(("output",), "jsonl", False)
        # resolve=True means "unlimited" (well, 128 levels deep)
        self.resolve = 128 if resolve is True else (resolve or self.resolve)

        private = config.get(("output",), "private")
        self.filter = dict.copy if private else util.filter_dict

        if self.resolve > 0:
            self.handle_queue = self.handle_queue_resolve
            # NOTE(review): 'out' is only disabled here when resolving;
            # presumably jsonl/full-dump interplay is handled elsewhere
            # for the non-resolve case -- confirm against callers
            if not self.jsonl:
                self.out = util.noop

    def run(self):
        """Collect all messages, then dump them as JSON; returns 0."""
        self._init()

        extractor = self.extractor
        sleep = util.build_duration_func(
            extractor.config("sleep-extractor"))
        if sleep is not None:
            extractor.sleep(sleep(), "extractor")

        # collect data
        try:
            self.dispatch(extractor)
        except exception.StopExtraction:
            pass
        except Exception as exc:
            # record the failure as a (-1, info) entry in the output
            self.exception = exc
            self.data.append((-1, {
                "error" : exc.__class__.__name__,
                "message": str(exc),
            }))
        except BaseException:
            # deliberate: still dump whatever was collected so far
            pass

        # convert numbers to string
        if config.get(("output",), "num-to-str", False):
            for msg in self.data:
                util.transform_dict(msg[-1], util.number_to_string)

        if self.file and not self.jsonl:
            # dump to 'file'
            try:
                util.dump_json(self.data, self.file, self.ascii, 2)
                self.file.flush()
            except Exception:
                pass

        return 0

    def out(self, msg):
        # JSONL mode: write each message as one JSON line immediately
        self.file.write(util.json_dumps(msg))
        self.file.write("\n")
        self.file.flush()

    def handle_url(self, url, kwdict):
        kwdict = self.filter(kwdict)
        self.out(msg := (Message.Url, url, kwdict))
        self.data_urls.append(url)
        self.data_meta.append(kwdict)
        self.data.append(msg)

    def handle_directory(self, kwdict):
        kwdict = self.filter(kwdict)
        self.out(msg := (Message.Directory, kwdict))
        self.data_post.append(kwdict)
        self.data.append(msg)

    def handle_queue(self, url, kwdict):
        kwdict = self.filter(kwdict)
        self.out(msg := (Message.Queue, url, kwdict))
        self.data_urls.append(url)
        self.data_meta.append(kwdict)
        self.data.append(msg)

    def handle_queue_resolve(self, url, kwdict):
        """Follow a queued URL with a child DataJob instead of
        recording it, while the resolve budget lasts."""
        if cls := kwdict.get("_extractor"):
            extr = cls.from_url(url)
        else:
            extr = extractor.find(url)

        if not extr:
            # unsupported URL: record the queue message as-is
            kwdict = self.filter(kwdict)
            self.out(msg := (Message.Queue, url, kwdict))
            self.data_urls.append(url)
            self.data_meta.append(kwdict)
            return self.data.append(msg)

        # child shares this job's result lists and a decremented budget
        job = self.__class__(extr, self, None, self.ascii, self.resolve-1)
        job.data = self.data
        job.data_urls = self.data_urls
        job.data_post = self.data_post
        job.data_meta = self.data_meta
        job.run()
1123