Fix search; use youtube-dlc

Cadence Ember 2020-12-03 16:32:31 +13:00
parent be1b1bc3a9
commit ba88c53857
5 changed files with 10 additions and 8 deletions

View File

@@ -1,6 +1,6 @@
 import requests
 import traceback
-import youtube_dl
+import youtube_dlc
 from tools.converters import *
 from tools.extractors import extract_yt_initial_data
 from cachetools import TTLCache
@@ -13,7 +13,7 @@ ytdl_opts = {
 	"playlist_items": "1-100",
 	"extract_flat": "in_playlist"
 }
-ytdl = youtube_dl.YoutubeDL(ytdl_opts)
+ytdl = youtube_dlc.YoutubeDL(ytdl_opts)
 
 def extract_search(q):
 	try:
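youtube-dlc keeps youtube-dl's Python API, so the migration in this file is limited to the import and the constructor call; the options dict is passed through unchanged. A minimal sketch of how a search extractor might drive the new module (the "quiet" key and the example query are assumptions for illustration, not taken from this repo):

import youtube_dlc

ytdl_opts = {
	"quiet": True,                     # assumption: not shown in the hunk above
	"playlist_items": "1-100",         # options visible in the diff above
	"extract_flat": "in_playlist"
}
ytdl = youtube_dlc.YoutubeDL(ytdl_opts)

# extract_info with download=False returns metadata only; "ytsearchall:" is
# youtube-dl(c)'s built-in search prefix, used here purely as an example.
results = ytdl.extract_info("ytsearchall:test query", download=False)
print(len(results["entries"]))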

View File

@@ -4,7 +4,7 @@ import json
 import os
 import re
 import traceback
-import youtube_dl
+import youtube_dlc
 import urllib.error
 from tools.converters import *
 from tools.extractors import extract_yt_initial_data
@@ -21,7 +21,7 @@ ytdl_opts = {
 	"write_pages": True,
 	"source_address": "0.0.0.0"
 }
-ytdl = youtube_dl.YoutubeDL(ytdl_opts)
+ytdl = youtube_dlc.YoutubeDL(ytdl_opts)
 
 def get_created_files(id):
 	if id[0] == "-":
@@ -171,7 +171,7 @@ def extract_video(id):
 		return result
-	except youtube_dl.DownloadError as e:
+	except youtube_dlc.DownloadError as e:
 		if isinstance(e.exc_info[1], urllib.error.HTTPError):
 			if e.exc_info[1].code == 429:
 				result = {
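The point to note in the video extractor is that DownloadError (raised under the same name by both libraries) wraps the original exception in its exc_info attribute, which is how the code above detects YouTube's HTTP 429 rate limiting. A hedged sketch of that pattern, with a hypothetical result shape:

import urllib.error
import youtube_dlc

ytdl = youtube_dlc.YoutubeDL({"quiet": True})  # assumption: minimal options for the sketch

def fetch_video(id):
	try:
		return ytdl.extract_info(id, download=False)
	except youtube_dlc.DownloadError as e:
		# exc_info holds the underlying exception; HTTP 429 means rate limited.
		if isinstance(e.exc_info[1], urllib.error.HTTPError) and e.exc_info[1].code == 429:
			return {"error": 429, "identifier": "RATE_LIMITED"}  # hypothetical shape
		raise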

View File

@@ -1,7 +1,7 @@
 import configuration
 import cherrypy
 import json
-import youtube_dl
+import youtube_dlc
 import datetime
 import dateutil.parser
 import os

View File

@@ -1,5 +1,5 @@
 cherrypy
-youtube-dl
+youtube-dlc
 cachetools
 python-dateutil
 requests
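Note the naming split: the PyPI distribution is youtube-dlc (hyphen) while the importable package is youtube_dlc (underscore), which is why requirements.txt and the imports have to change together. A quick sanity check after reinstalling, assuming Python 3.8+ for importlib.metadata:

import importlib.metadata
import youtube_dlc

print(importlib.metadata.version("youtube-dlc"))  # distribution name (hyphen)
print(youtube_dlc.__name__)                       # import name (underscore)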

View File

@@ -1,11 +1,13 @@
 import re
 import json
 
-r_yt_initial_data = re.compile(r"""^(?:\s*window\["ytInitialData"\]|var ytInitialData) = (\{.*\});\s*\n?$""", re.M)
+r_yt_initial_data = re.compile(r"""(?:\s*window\["ytInitialData"\]|var ytInitialData) = (\{.+\});</script>""")
 
 def extract_yt_initial_data(content):
+	content = content.replace("\n", "")
 	m_yt_initial_data = re.search(r_yt_initial_data, content)
 	if m_yt_initial_data:
+		print(m_yt_initial_data.group(1))
 		yt_initial_data = json.loads(m_yt_initial_data.group(1))
 		return yt_initial_data
 	else:
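The regex change is the actual search fix: the old pattern only matched when the ytInitialData assignment sat on its own line (^…$ with re.M), whereas YouTube now serves it inline inside a <script> tag, so the new pattern anchors on the closing </script> and the extractor first collapses newlines so .+ can span the whole JSON blob. A small sketch of the new behaviour against a made-up page fragment (the sample HTML is hypothetical):

import re
import json

r_yt_initial_data = re.compile(r"""(?:\s*window\["ytInitialData"\]|var ytInitialData) = (\{.+\});</script>""")

sample = '<script nonce="x">var ytInitialData = {"contents":\n{"items": []}};</script>'

content = sample.replace("\n", "")         # collapse newlines so .+ can cross them
m = re.search(r_yt_initial_data, content)
if m:
	data = json.loads(m.group(1))
	print(data["contents"]["items"])       # -> []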