fork from github/youtube-local

Author: Brandon4466
Date: 2025-06-29 20:42:55 -07:00
commit dce02a77a6
75 changed files with 22756 additions and 0 deletions

youtube/static/comments.css Normal file

@@ -0,0 +1,196 @@
.video-metadata{
display: grid;
grid-template-columns: auto 1fr;
grid-template-rows: auto auto 1fr;
grid-template-areas:
"video-metadata-thumbnail-box title"
"video-metadata-thumbnail-box page"
"video-metadata-thumbnail-box sort";
margin-bottom: 10px;
}
.video-metadata > .video-metadata-thumbnail-box{
grid-area: video-metadata-thumbnail-box;
/* https://www.smashingmagazine.com/2020/03/setting-height-width-images-important-again/ */
position: relative;
width:320px;
max-width:100%;
}
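/* The :before pseudo-element below reserves the thumbnail's 180/320 (16:9)
box before the image loads: padding-top percentages resolve against the
element's width, so calc(180/320*100%) = 56.25% of the box width. */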
.video-metadata > .video-metadata-thumbnail-box:before{
display: block;
content: "";
height: 0px;
padding-top: calc(180/320*100%);
}
.video-metadata-thumbnail-box img{
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
.video-metadata > .title{
word-wrap:break-word;
grid-area: title;
}
.video-metadata > h2{
grid-area: page;
font-size: 0.875rem;
}
.video-metadata > span{
grid-area: sort;
}
.comment-form{
display: grid;
align-content: start;
justify-items: start;
align-items: start;
}
#comment-account-options{
display:grid;
grid-auto-flow: column;
grid-column-gap: 10px;
margin-top:10px;
margin-bottom:10px;
}
#comment-account-options a{
margin-left:10px;
}
.comments-area{
display:grid;
}
.comments-area textarea{
resize: vertical;
justify-self:stretch;
}
.post-comment-button{
margin-top:10px;
justify-self:end;
}
.comment-links{
display:grid;
grid-auto-flow: column;
grid-column-gap: 10px;
justify-content:start;
}
.comments{
margin-top:10px;
grid-row-gap: 10px;
display: grid;
align-content:start;
}
.comment{
display:grid;
grid-template-columns: repeat(3, auto) 1fr;
grid-template-rows: repeat(4, auto);
grid-template-areas:
"author-avatar author-name permalink ."
"author-avatar comment-text comment-text comment-text"
"author-avatar comment-likes comment-likes comment-likes"
". bottom-row bottom-row bottom-row";
background-color: var(--interface-color);
justify-content: start;
}
.comment .author-avatar{
grid-area: author-avatar;
align-self: start;
margin-right: 5px;
height:32px;
width:32px;
}
.comment .author-avatar-img{
max-height: 100%;
}
.comment .author-name{
grid-area: author-name;
margin-right:15px;
white-space: nowrap;
overflow:hidden;
}
.comment .text{
grid-area: comment-text;
white-space: pre-wrap;
min-width: 0;
word-wrap: break-word;
}
.comment .permalink{
grid-area: permalink;
white-space: nowrap;
}
.comment .likes{
grid-area: comment-likes;
font-weight:bold;
white-space: nowrap;
}
.comment .bottom-row{
grid-area: bottom-row;
justify-self:start;
}
details.replies > summary{
background-color: var(--interface-color);
border-style: outset;
border-width: 1px;
font-weight: bold;
padding: 2px;
}
.replies-open-new-tab{
display: inline-block;
margin-top: 5px;
}
details.replies .comment{
max-width: 600px;
}
.more-comments{
justify-self:center;
margin-top:10px;
margin-bottom: 10px;
}
@media (max-width:500px){
.video-metadata{
grid-template-columns: 1fr;
grid-template-rows: 1fr auto auto auto;
grid-template-areas:
"video-metadata-thumbnail-box"
"title"
"page"
"sort";
}
.video-metadata > .video-metadata-thumbnail-box{
grid-area: video-metadata-thumbnail-box;
/* Swapped relative to the rule above: width:100% lets the box shrink with
narrow viewports while max-width caps it at 320px. Using the desktop
values here breaks the layout. */
width: 100%;
max-width: 320px;
}
.comment{
grid-template-columns: auto 1fr;
grid-template-rows: repeat(5, auto);
grid-template-areas:
"author-avatar author-name"
"author-avatar comment-text"
"author-avatar comment-likes"
"author-avatar permalink"
". bottom-row";
}
.comment .author-name{
margin-right: 0px;
}
.comment .permalink{
justify-self: start;
margin-bottom: 10px;
}
}


@@ -0,0 +1,66 @@
body{
--interface-color: #333333;
--text-color: #cccccc;
--background-color: #000000;
--video-background-color: #080808;
--link-color-rgb: 34, 170, 255;
--visited-link-color-rgb: 119, 85, 255;
}
a:link {
color: rgb(var(--link-color-rgb));
}
a:visited {
color: rgb(var(--visited-link-color-rgb));
}
a:not([href]){
color: var(--text-color);
}
.comment .permalink{
color: #ffffff;
}
.setting-item{
background-color: #444444;
}
.muted{
background-color: #111111;
color: gray;
}
.muted a:link {
color: #10547f;
}
.button,
input,
select,
button[type=submit]{
color: var(--text-color);
background-color: #444444;
border: 1px solid var(--text-color);
border-radius: 3px;
padding: 2px 3px;
}
.button:hover,
input:hover,
select:hover,
button[type=submit]:hover{
background-color: #222222;
}
input[type="checkbox"]{
-webkit-filter: invert(85%) hue-rotate(18deg) brightness(1.7);
filter: invert(85%) hue-rotate(18deg) brightness(1.7);
}
input[type="checkbox"]:checked{
-webkit-filter: none;
filter: none;
}

youtube/static/favicon.ico Normal file

Binary file not shown.

@@ -0,0 +1,20 @@
body{
--interface-color: #dadada;
--text-color: #222222;
--background-color: #bcbcbc;
--video-background-color: #dadada;
--link-color-rgb: 0, 0, 238;
--visited-link-color-rgb: 85, 26, 139;
}
.comment .permalink{
color: #000000;
}
.setting-item{
background-color: #eeeeee;
}
.muted{
background-color: #888888;
}


@@ -0,0 +1,986 @@
// Heavily modified from
// https://github.com/nickdesaulniers/netfix/issues/4#issuecomment-578856471
// which was in turn modified from
// https://github.com/nickdesaulniers/netfix/blob/gh-pages/demo/bufferWhenNeeded.html
// Useful reading:
// https://stackoverflow.com/questions/35177797/what-exactly-is-fragmented-mp4fmp4-how-is-it-different-from-normal-mp4
// https://axel.isouard.fr/blog/2016/05/24/streaming-webm-video-over-html5-with-media-source
// We start by parsing the sidx (segment index) table in order to get the
// byte ranges of the segments. The byte range of the sidx table is provided
// by the indexRange variable by YouTube
// Useful info, as well as segments vs sequence mode (we use segments mode)
// https://joshuatz.com/posts/2020/appending-videos-in-javascript-with-mediasource-buffers/
// SourceBuffer data limits:
// https://developers.google.com/web/updates/2017/10/quotaexceedederror
// TODO: Call abort to cancel in-progress appends?
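// After setup, each stream's sidx.entries is an array of per-segment objects
// with (at least) these fields, built by sidx_parse/parseWebmCues below:
//   start, end            - inclusive byte range of the segment within the file
//   tickStart, tickEnd    - time range in units of sidx.timeScale ticks
//   referencedSize, subSegmentDuration
//   requested, have       - bookkeeping flags used while fetching/buffering
// checkBuffer and appendSegment walk these entries to decide which byte
// ranges to request next.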
function AVMerge(video, srcInfo, startTime){
this.audioSource = null;
this.videoSource = null;
this.avRatio = null;
this.videoStream = null;
this.audioStream = null;
this.seeking = false;
this.startTime = startTime;
this.video = video;
this.mediaSource = null;
this.closed = false;
this.opened = false;
this.audioEndOfStreamCalled = false;
this.videoEndOfStreamCalled = false;
if (!('MediaSource' in window)) {
reportError('MediaSource not supported.');
return;
}
// Find supported video and audio sources
for (var src of srcInfo['videos']) {
if (MediaSource.isTypeSupported(src['mime_codec'])) {
reportDebug('Using video source', src['mime_codec'],
src['quality_string'], 'itag', src['itag']);
this.videoSource = src;
break;
}
}
for (var src of srcInfo['audios']) {
if (MediaSource.isTypeSupported(src['mime_codec'])) {
reportDebug('Using audio source', src['mime_codec'],
src['quality_string'], 'itag', src['itag']);
this.audioSource = src;
break;
}
}
if (this.videoSource === null)
reportError('No supported video MIME type or codec found: ',
srcInfo['videos'].map(s => s.mime_codec).join(', '));
if (this.audioSource === null)
reportError('No supported audio MIME type or codec found: ',
srcInfo['audios'].map(s => s.mime_codec).join(', '));
if (this.videoSource === null || this.audioSource === null)
return;
if (this.videoSource.bitrate && this.audioSource.bitrate)
this.avRatio = this.audioSource.bitrate/this.videoSource.bitrate;
else
this.avRatio = 1/10;
this.setup();
}
AVMerge.prototype.setup = function() {
this.mediaSource = new MediaSource();
this.video.src = URL.createObjectURL(this.mediaSource);
this.mediaSource.onsourceopen = this.sourceOpen.bind(this);
}
AVMerge.prototype.sourceOpen = function(_) {
// If after calling mediaSource.endOfStream, the user seeks back
// into the video, the sourceOpen event will be fired again. Do not
// overwrite the streams.
this.audioEndOfStreamCalled = false;
this.videoEndOfStreamCalled = false;
if (this.opened)
return;
this.opened = true;
this.videoStream = new Stream(this, this.videoSource, this.startTime,
this.avRatio);
this.audioStream = new Stream(this, this.audioSource, this.startTime,
this.avRatio);
this.videoStream.setup();
this.audioStream.setup();
this.timeUpdateEvt = addEvent(this.video, 'timeupdate',
this.checkBothBuffers.bind(this));
this.seekingEvt = addEvent(this.video, 'seeking',
debounce(this.seek.bind(this), 500));
//this.video.onseeked = function() {console.log('seeked')};
}
AVMerge.prototype.close = function() {
if (this.closed)
return;
this.closed = true;
this.videoStream.close();
this.audioStream.close();
this.timeUpdateEvt.remove();
this.seekingEvt.remove();
if (this.mediaSource.readyState == 'open')
this.mediaSource.endOfStream();
}
AVMerge.prototype.checkBothBuffers = function() {
this.audioStream.checkBuffer();
this.videoStream.checkBuffer();
}
AVMerge.prototype.seek = function(e) {
if (this.mediaSource.readyState === 'open') {
this.seeking = true;
this.audioStream.handleSeek();
this.videoStream.handleSeek();
this.seeking = false;
} else {
reportWarning('seek but not open? readyState:',
this.mediaSource.readyState);
}
}
AVMerge.prototype.audioEndOfStream = function() {
if (this.videoEndOfStreamCalled && !this.audioEndOfStreamCalled) {
reportDebug('Calling mediaSource.endOfStream()');
this.mediaSource.endOfStream();
}
this.audioEndOfStreamCalled = true;
}
AVMerge.prototype.videoEndOfStream = function() {
if (this.audioEndOfStreamCalled && !this.videoEndOfStreamCalled) {
reportDebug('Calling mediaSource.endOfStream()');
this.mediaSource.endOfStream();
}
this.videoEndOfStreamCalled = true;
}
AVMerge.prototype.printDebuggingInfo = function() {
reportDebug('videoSource:', this.videoSource);
reportDebug('audioSource:', this.audioSource);
reportDebug('video sidx:', this.videoStream.sidx);
reportDebug('audio sidx:', this.audioStream.sidx);
reportDebug('video updating', this.videoStream.sourceBuffer.updating);
reportDebug('audio updating', this.audioStream.sourceBuffer.updating);
reportDebug('video duration:', this.video.duration);
reportDebug('video current time:', this.video.currentTime);
reportDebug('mediaSource.readyState:', this.mediaSource.readyState);
reportDebug('videoEndOfStreamCalled', this.videoEndOfStreamCalled);
reportDebug('audioEndOfStreamCalled', this.audioEndOfStreamCalled);
for (let obj of [this.videoStream, this.audioStream]) {
reportDebug(obj.streamType, 'stream buffered times:');
for (let i=0; i<obj.sourceBuffer.buffered.length; i++) {
reportDebug(String(obj.sourceBuffer.buffered.start(i)) + '-'
+ String(obj.sourceBuffer.buffered.end(i)));
}
}
}
function Stream(avMerge, source, startTime, avRatio) {
this.avMerge = avMerge;
this.video = avMerge.video;
this.url = source['url'];
this.ext = source['ext'];
this.fileSize = source['file_size'];
this.closed = false;
this.mimeCodec = source['mime_codec']
this.streamType = source['acodec'] ? 'audio' : 'video';
if (this.streamType == 'audio') {
this.bufferTarget = avRatio*50*10**6;
} else {
this.bufferTarget = 50*10**6; // 50 megabytes
}
this.initRange = source['init_range'];
this.indexRange = source['index_range'];
this.startTime = startTime;
this.mediaSource = avMerge.mediaSource;
this.sidx = null;
this.appendRetries = 0;
this.appendQueue = []; // list of [segmentIdx, data]
this.sourceBuffer = this.mediaSource.addSourceBuffer(this.mimeCodec);
this.sourceBuffer.mode = 'segments';
this.sourceBuffer.addEventListener('error', (e) => {
this.reportError('sourceBuffer error', e);
});
this.updateendEvt = addEvent(this.sourceBuffer, 'updateend', (e) => {
if (this.appendQueue.length != 0) {
this.appendSegment(...this.appendQueue.shift());
}
});
}
Stream.prototype.setup = async function(){
// Group requests together
if (this.initRange.end+1 == this.indexRange.start){
fetchRange(
this.url,
this.initRange.start,
this.indexRange.end,
'Initialization+index segments',
).then(
(buffer) => {
var init_end = this.initRange.end - this.initRange.start + 1;
var index_start = this.indexRange.start - this.initRange.start;
var index_end = this.indexRange.end - this.initRange.start + 1;
this.setupInitSegment(buffer.slice(0, init_end));
this.setupSegmentIndex(buffer.slice(index_start, index_end));
}
);
} else {
// initialization data
await fetchRange(
this.url,
this.initRange.start,
this.initRange.end,
'Initialization segment',
).then(this.setupInitSegment.bind(this));
// sidx (segment index) table
fetchRange(
this.url,
this.indexRange.start,
this.indexRange.end,
'Index segment',
).then(this.setupSegmentIndex.bind(this));
}
}
Stream.prototype.setupInitSegment = function(initSegment) {
if (this.ext == 'webm')
this.sidx = extractWebmInitializationInfo(initSegment);
this.appendSegment(null, initSegment);
}
Stream.prototype.setupSegmentIndex = async function(indexSegment){
if (this.ext == 'webm') {
this.sidx.entries = parseWebmCues(indexSegment, this.sidx);
if (this.fileSize) {
let lastIdx = this.sidx.entries.length - 1;
this.sidx.entries[lastIdx].end = this.fileSize - 1;
}
for (let entry of this.sidx.entries) {
entry.subSegmentDuration = entry.tickEnd - entry.tickStart + 1;
if (entry.end)
entry.referencedSize = entry.end - entry.start + 1;
}
} else {
var box = unbox(indexSegment);
this.sidx = sidx_parse(box.data, this.indexRange.end+1);
}
this.fetchSegmentIfNeeded(this.getSegmentIdx(this.startTime));
}
Stream.prototype.close = function() {
// Prevents appendSegment adding to buffer if request finishes
// after closing
this.closed = true;
if (this.sourceBuffer.updating)
this.sourceBuffer.abort();
this.mediaSource.removeSourceBuffer(this.sourceBuffer);
this.updateendEvt.remove();
}
Stream.prototype.appendSegment = function(segmentIdx, chunk) {
if (this.closed)
return;
this.reportDebug('Received segment', segmentIdx)
// cannot append right now, schedule for updateend
if (this.sourceBuffer.updating) {
this.reportDebug('sourceBuffer updating, queueing for later');
this.appendQueue.push([segmentIdx, chunk]);
if (this.appendQueue.length > 2){
this.reportWarning('appendQueue length:', this.appendQueue.length);
}
return;
}
try {
this.sourceBuffer.appendBuffer(chunk);
if (segmentIdx !== null)
this.sidx.entries[segmentIdx].have = true;
this.appendRetries = 0;
} catch (e) {
if (e.name !== 'QuotaExceededError') {
throw e;
}
this.reportWarning('QuotaExceededError.');
// Count how many bytes are in buffer to update buffering target,
// updating .have as well for when we need to delete segments
var bytesInBuffer = 0;
for (var i = 0; i < this.sidx.entries.length; i++) {
if (this.segmentInBuffer(i))
bytesInBuffer += this.sidx.entries[i].referencedSize;
else if (this.sidx.entries[i].have) {
this.sidx.entries[i].have = false;
this.sidx.entries[i].requested = false;
}
}
bytesInBuffer = Math.floor(4/5*bytesInBuffer);
if (bytesInBuffer < this.bufferTarget) {
this.bufferTarget = bytesInBuffer;
this.reportDebug('New buffer target:', this.bufferTarget);
}
// Delete 10 segments (arbitrary) from buffer, making sure
// not to delete current one
var currentSegment = this.getSegmentIdx(this.video.currentTime);
var numDeleted = 0;
var i = 0;
const DELETION_TARGET = 10;
var toDelete = []; // See below for why we have to schedule it
this.reportDebug('Deleting segments from beginning of buffer.');
while (numDeleted < DELETION_TARGET && i < currentSegment) {
if (this.sidx.entries[i].have) {
toDelete.push(i)
numDeleted++;
}
i++;
}
if (numDeleted < DELETION_TARGET)
this.reportDebug('Deleting segments from end of buffer.');
i = this.sidx.entries.length - 1;
while (numDeleted < DELETION_TARGET && i > currentSegment) {
if (this.sidx.entries[i].have) {
toDelete.push(i)
numDeleted++;
}
i--;
}
// When calling .remove, the sourceBuffer will go into updating=true
// state, and remove cannot be called until it is done. So we have
// to delete on the updateend event for subsequent ones.
var removeFinishedEvent;
var deletedStuff = (toDelete.length !== 0)
var deleteSegment = () => {
if (toDelete.length === 0) {
removeFinishedEvent.remove();
// If QuotaExceeded happened for current segment, retry the
// append
// Rescheduling will take care of updating=true problem.
// Also check that we found segments to delete, to avoid
// infinite looping if we can't delete anything
if (segmentIdx === currentSegment && deletedStuff) {
this.reportDebug('Retrying appendSegment for', segmentIdx);
this.appendSegment(segmentIdx, chunk);
} else {
this.reportDebug('Not retrying segment', segmentIdx);
this.sidx.entries[segmentIdx].requested = false;
}
return;
}
let idx = toDelete.shift();
let entry = this.sidx.entries[idx];
let start = entry.tickStart/this.sidx.timeScale;
let end = (entry.tickEnd+1)/this.sidx.timeScale;
this.reportDebug('Deleting segment', idx);
this.sourceBuffer.remove(start, end);
entry.have = false;
entry.requested = false;
}
removeFinishedEvent = addEvent(this.sourceBuffer, 'updateend',
deleteSegment);
if (!this.sourceBuffer.updating)
deleteSegment();
}
}
Stream.prototype.getSegmentIdx = function(videoTime) {
// get an estimate
var currentTick = videoTime * this.sidx.timeScale;
var firstSegmentDuration = this.sidx.entries[0].subSegmentDuration;
var index = 1 + Math.floor(currentTick / firstSegmentDuration);
index = clamp(index, 0, this.sidx.entries.length - 1);
var increment = 1;
if (currentTick < this.sidx.entries[index].tickStart){
increment = -1;
}
// go up or down to find correct index
while (index >= 0 && index < this.sidx.entries.length) {
var entry = this.sidx.entries[index];
if (entry.tickStart <= currentTick && (entry.tickEnd+1) > currentTick){
return index;
}
index = index + increment;
}
this.reportError('Could not find segment index for time', videoTime);
return 0;
}
Stream.prototype.checkBuffer = async function() {
if (this.avMerge.seeking) {
return;
}
// Find the first unbuffered segment, i
var currentSegmentIdx = this.getSegmentIdx(this.video.currentTime);
var bufferedBytesAhead = 0;
var i;
for (i = currentSegmentIdx; i < this.sidx.entries.length; i++) {
var entry = this.sidx.entries[i];
// check if we had it before, but it was deleted by the browser
if (entry.have && !this.segmentInBuffer(i)) {
this.reportDebug('segment', i, 'deleted by browser');
entry.have = false;
entry.requested = false;
}
if (!entry.have) {
break;
}
bufferedBytesAhead += entry.referencedSize;
if (bufferedBytesAhead > this.bufferTarget) {
return;
}
}
if (i < this.sidx.entries.length && !this.sidx.entries[i].requested) {
this.fetchSegment(i);
// We have all the segments until the end
// Signal the end of stream
} else if (i == this.sidx.entries.length) {
if (this.streamType == 'audio')
this.avMerge.audioEndOfStream();
else
this.avMerge.videoEndOfStream();
}
}
Stream.prototype.segmentInBuffer = function(segmentIdx) {
var entry = this.sidx.entries[segmentIdx];
// allow for 0.01 second error
var timeStart = entry.tickStart/this.sidx.timeScale + 0.01;
/* Some of YouTube's mp4 fragments are malformed, with half-frame
playback gaps. In this video at 240p (timeScale = 90000 ticks/second)
https://www.youtube.com/watch?v=ZhOQCwJvwlo
segment 4 (starting at 0) is claimed in the sidx table to have
a duration of 388500 ticks, but closer examination of the file using
Bento4 mp4dump shows that the segment has 129 frames at 3000 ticks
per frame, which gives an actual duration of 387000 ticks (1500 less than
claimed). The file is 30 fps, so this error is exactly half a frame.
Note that the base_media_decode_time exactly matches the tickStart,
so the media decoder is being given a time gap of half a frame.
The practical result of this is that sourceBuffer.buffered reports
a timeRange.end that is less than expected for that segment, resulting in
a false determination that the browser has deleted a segment.
Segment 5 has the opposite issue, where it has a 1500 tick surplus of video
data compared to the sidx length. Segments 6 and 7 also have this
deficit-surplus pattern.
This might have something to do with the fact that the video also
has 60 fps formats. In order to allow for adaptive streaming and seamless
quality switching, YouTube likely encodes their formats to line up nicely.
Either there is a bug in their encoder, or this is intentional. Allow for
up to 1 frame-time of error to work around this issue. */
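// e.g. at timeScale = 90000 ticks/s and 30 fps, one frame is 90000/30 = 3000
// ticks, so the 1500-tick deficit above is half a frame (~0.017 s); allowing
// one frame-time (1/30 s, about 0.033 s) of error covers it with room to spare.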
if (this.streamType == 'video')
var endError = 1/(this.avMerge.videoSource.fps || 30);
else
var endError = 0.01
var timeEnd = (entry.tickEnd+1)/this.sidx.timeScale - endError;
var timeRanges = this.sourceBuffer.buffered;
for (var i=0; i < timeRanges.length; i++) {
if (timeRanges.start(i) <= timeStart && timeEnd <= timeRanges.end(i)) {
return true;
}
}
return false;
}
Stream.prototype.fetchSegment = function(segmentIdx) {
var entry = this.sidx.entries[segmentIdx];
entry.requested = true;
this.reportDebug(
'Fetching segment', segmentIdx, ', bytes',
entry.start, entry.end, ', seconds',
entry.tickStart/this.sidx.timeScale,
(entry.tickEnd+1)/this.sidx.timeScale
)
fetchRange(
this.url,
entry.start,
entry.end,
String(this.streamType) + ' segment ' + String(segmentIdx),
).then(this.appendSegment.bind(this, segmentIdx));
}
Stream.prototype.fetchSegmentIfNeeded = function(segmentIdx) {
if (segmentIdx < 0 || segmentIdx >= this.sidx.entries.length){
return;
}
var entry = this.sidx.entries[segmentIdx];
// check if we had it before, but it was deleted by the browser
if (entry.have && !this.segmentInBuffer(segmentIdx)) {
this.reportDebug('segment', segmentIdx, 'deleted by browser');
entry.have = false;
entry.requested = false;
}
if (entry.requested) {
return;
}
this.fetchSegment(segmentIdx);
}
Stream.prototype.handleSeek = function() {
var segmentIdx = this.getSegmentIdx(this.video.currentTime);
this.fetchSegmentIfNeeded(segmentIdx);
}
Stream.prototype.reportDebug = function(...args) {
reportDebug(String(this.streamType) + ':', ...args);
}
Stream.prototype.reportWarning = function(...args) {
reportWarning(String(this.streamType) + ':', ...args);
}
Stream.prototype.reportError = function(...args) {
reportError(String(this.streamType) + ':', ...args);
}
// Utility functions
// https://gomakethings.com/promise-based-xhr/
// https://stackoverflow.com/a/30008115
// http://lofi.limo/blog/retry-xmlhttprequest-carefully
function fetchRange(url, start, end, debugInfo) {
return new Promise((resolve, reject) => {
var retryCount = 0;
var xhr = new XMLHttpRequest();
function onFailure(err, message, maxRetries=5){
message = debugInfo + ': ' + message + ' - Err: ' + String(err);
retryCount++;
if (retryCount > maxRetries || xhr.status == 403){
reportError('fetchRange error while fetching ' + message);
reject(message);
return;
} else {
reportWarning('Failed to fetch ' + message
+ '. Attempting retry '
+ String(retryCount) +'/' + String(maxRetries));
}
// Retry in 1 second, doubled for each next retry
setTimeout(function(){
xhr.open('get',url);
xhr.send();
}, 1000*Math.pow(2,(retryCount-1)));
}
xhr.open('get', url);
xhr.timeout = 15000;
xhr.responseType = 'arraybuffer';
xhr.setRequestHeader('Range', 'bytes=' + start + '-' + end);
xhr.onload = function (e) {
if (xhr.status >= 200 && xhr.status < 300) {
resolve(xhr.response);
} else {
onFailure(e,
'Status '
+ String(xhr.status) + ' ' + String(xhr.statusText)
);
}
};
xhr.onerror = function (event) {
onFailure(event, 'Network error');
};
xhr.ontimeout = function (event){
onFailure(null, 'Timeout (' + String(xhr.timeout/1000) + 's)', 5);
xhr.timeout += 5000; // give the next retry more time
};
xhr.send();
});
}
function debounce(func, wait, immediate) {
var timeout;
return function() {
var context = this;
var args = arguments;
var later = function() {
timeout = null;
if (!immediate) func.apply(context, args);
};
var callNow = immediate && !timeout;
clearTimeout(timeout);
timeout = setTimeout(later, wait);
if (callNow) func.apply(context, args);
};
}
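// Used above in sourceOpen: debounce(this.seek.bind(this), 500) collapses a
// burst of 'seeking' events into a single seek() call made 500 ms after the
// last event in the burst.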
function clamp(number, min, max) {
return Math.max(min, Math.min(number, max));
}
// allow to remove an event listener without having a function reference
function RegisteredEvent(obj, eventName, func) {
this.obj = obj;
this.eventName = eventName;
this.func = func;
obj.addEventListener(eventName, func);
}
RegisteredEvent.prototype.remove = function() {
this.obj.removeEventListener(this.eventName, this.func);
}
function addEvent(obj, eventName, func) {
return new RegisteredEvent(obj, eventName, func);
}
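// Usage: const evt = addEvent(video, 'timeupdate', cb); ... evt.remove();
// (see timeUpdateEvt/seekingEvt in AVMerge.sourceOpen and close above)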
function reportWarning(...args){
console.warn(...args);
}
function reportError(...args){
console.error(...args);
}
function reportDebug(...args){
console.debug(...args);
}
// Note: despite the name, bytes are read most-significant-first (big-endian),
// which matches the byte order of mp4 sidx fields and EBML values.
function byteArrayToIntegerLittleEndian(unsignedByteArray){
var result = 0;
for (const byte of unsignedByteArray){
result = result*256;
result += byte;
}
return result;
}
function byteArrayToFloat(byteArray) {
var view = new DataView(byteArray.buffer);
if (byteArray.length == 4)
return view.getFloat32(byteArray.byteOffset);
else
return view.getFloat64(byteArray.byteOffset);
}
function ByteParser(data){
this.curIndex = 0;
this.data = new Uint8Array(data);
}
ByteParser.prototype.readInteger = function(nBytes){
var result = byteArrayToIntegerLittleEndian(
this.data.slice(this.curIndex, this.curIndex + nBytes)
);
this.curIndex += nBytes;
return result;
}
ByteParser.prototype.readBufferBytes = function(nBytes){
var result = this.data.slice(this.curIndex, this.curIndex + nBytes);
this.curIndex += nBytes;
return result;
}
// BEGIN iso-bmff-parser-stream/lib/box/sidx.js (modified)
// https://github.com/necccc/iso-bmff-parser-stream/blob/master/lib/box/sidx.js
/* The MIT License (MIT)
Copyright (c) 2014 Szabolcs Szabolcsi-Toth
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.*/
function sidx_parse (data, offset) {
var bp = new ByteParser(data),
version = bp.readInteger(1),
flags = bp.readInteger(3),
referenceId = bp.readInteger(4),
timeScale = bp.readInteger(4),
earliestPresentationTime = bp.readInteger(version === 0 ? 4 : 8),
firstOffset = bp.readInteger(4),
__reserved = bp.readInteger(2),
entryCount = bp.readInteger(2),
entries = [];
var totalBytesOffset = firstOffset + offset;
var totalTicks = 0;
for (var i = entryCount; i > 0; i=i-1 ) {
let referencedSize = bp.readInteger(4),
subSegmentDuration = bp.readInteger(4),
unused = bp.readBufferBytes(4)
entries.push({
referencedSize: referencedSize,
subSegmentDuration: subSegmentDuration,
unused: unused,
start: totalBytesOffset,
end: totalBytesOffset + referencedSize - 1, // inclusive
tickStart: totalTicks,
tickEnd: totalTicks + subSegmentDuration - 1,
requested: false,
have: false,
});
totalBytesOffset = totalBytesOffset + referencedSize;
totalTicks = totalTicks + subSegmentDuration;
}
return {
version: version,
flags: flags,
referenceId: referenceId,
timeScale: timeScale,
earliestPresentationTime: earliestPresentationTime,
firstOffset: firstOffset,
entries: entries
};
}
// END sidx.js
// BEGIN iso-bmff-parser-stream/lib/unbox.js (same license), modified
function unbox(buf) {
var bp = new ByteParser(buf),
bufferLength = buf.length,
length,
typeData,
boxData
length = bp.readInteger(4); // length of entire box,
typeData = bp.readInteger(4);
if (bufferLength - length < 0) {
reportWarning('Warning: sidx table is cut off');
return {
currentLength: bufferLength,
length: length,
type: typeData,
data: bp.readBufferBytes(bufferLength)
};
}
boxData = bp.readBufferBytes(length - 8);
return {
length: length,
type: typeData,
data: boxData
};
}
// END unbox.js
function extractWebmInitializationInfo(initializationSegment) {
var result = {
timeScale: null,
cuesOffset: null,
duration: null,
};
(new EbmlDecoder()).readTags(initializationSegment, (tagType, tag) => {
if (tag.name == 'TimecodeScale')
result.timeScale = byteArrayToIntegerLittleEndian(tag.data);
else if (tag.name == 'Duration')
// Integer represented as a float (why??); units of TimecodeScale
result.duration = byteArrayToFloat(tag.data);
// https://lists.matroska.org/pipermail/matroska-devel/2013-July/004549.html
// "CueClusterPosition in turn is relative to the segment's data start
// position" (the data start is the position after the bytes
// used to represent the tag ID and entry size)
else if (tagType == 'start' && tag.name == 'Segment')
result.cuesOffset = tag.dataStart;
});
if (result.timeScale === null) {
result.timeScale = 1000000;
}
// webm timecodeScale is the number of nanoseconds in a tick
// Convert it to number of ticks per second to match mp4 convention
result.timeScale = 10**9/result.timeScale;
return result;
}
function parseWebmCues(indexSegment, initInfo) {
var entries = [];
var currentEntry = {};
var cuesOffset = initInfo.cuesOffset;
(new EbmlDecoder()).readTags(indexSegment, (tagType, tag) => {
if (tag.name == 'CueTime') {
const tickStart = byteArrayToIntegerLittleEndian(tag.data);
currentEntry.tickStart = tickStart;
if (entries.length !== 0)
entries[entries.length - 1].tickEnd = tickStart - 1;
} else if (tag.name == 'CueClusterPosition') {
const byteStart = byteArrayToIntegerLittleEndian(tag.data);
currentEntry.start = cuesOffset + byteStart;
if (entries.length !== 0)
entries[entries.length - 1].end = cuesOffset + byteStart - 1;
} else if (tagType == 'end' && tag.name == 'CuePoint') {
entries.push(currentEntry);
currentEntry = {};
}
});
if (initInfo.duration)
entries[entries.length - 1].tickEnd = initInfo.duration - 1;
return entries;
}
// BEGIN node-ebml (modified) for parsing WEBM cues table
// https://github.com/node-ebml/node-ebml
/* Copyright (c) 2013-2018 Mark Schmale and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.*/
const schema = new Map([
[0x18538067, ['Segment', 'm']],
[0x1c53bb6b, ['Cues', 'm']],
[0xbb, ['CuePoint', 'm']],
[0xb3, ['CueTime', 'u']],
[0xb7, ['CueTrackPositions', 'm']],
[0xf7, ['CueTrack', 'u']],
[0xf1, ['CueClusterPosition', 'u']],
[0x1549a966, ['Info', 'm']],
[0x2ad7b1, ['TimecodeScale', 'u']],
[0x4489, ['Duration', 'f']],
]);
function EbmlDecoder() {
this.buffer = null;
this.emit = null;
this.tagStack = [];
this.cursor = 0;
}
EbmlDecoder.prototype.readTags = function(chunk, onParsedTag) {
this.buffer = new Uint8Array(chunk);
this.emit = onParsedTag;
while (this.cursor < this.buffer.length) {
if (!this.readTag() || !this.readSize() || !this.readContent()) {
break;
}
}
}
EbmlDecoder.prototype.getSchemaInfo = function(tag) {
if (Number.isInteger(tag) && schema.has(tag)) {
var name, type;
[name, type] = schema.get(tag);
return {name, type};
}
return {
type: null,
name: 'unknown',
};
}
EbmlDecoder.prototype.readTag = function() {
if (this.cursor >= this.buffer.length) {
return false;
}
const tag = readVint(this.buffer, this.cursor);
if (tag == null) {
return false;
}
const tagObj = {
tag: tag.value,
...this.getSchemaInfo(tag.valueWithLeading1),
start: this.cursor,
end: this.cursor + tag.length, // exclusive; also overwritten below
};
this.tagStack.push(tagObj);
this.cursor += tag.length;
return true;
}
EbmlDecoder.prototype.readSize = function() {
const tagObj = this.tagStack[this.tagStack.length - 1];
if (this.cursor >= this.buffer.length) {
return false;
}
const size = readVint(this.buffer, this.cursor);
if (size == null) {
return false;
}
tagObj.dataSize = size.value;
// unknown size
if (size.value === -1) {
tagObj.end = -1;
} else {
tagObj.end += size.value + size.length;
}
this.cursor += size.length;
tagObj.dataStart = this.cursor;
return true;
}
EbmlDecoder.prototype.readContent = function() {
const { type, dataSize, ...rest } = this.tagStack[
this.tagStack.length - 1
];
if (type === 'm') {
this.emit('start', { type, dataSize, ...rest });
return true;
}
if (this.buffer.length < this.cursor + dataSize) {
return false;
}
const data = this.buffer.subarray(this.cursor, this.cursor + dataSize);
this.cursor += dataSize;
this.tagStack.pop(); // remove the object from the stack
this.emit('tag', { type, dataSize, data, ...rest });
while (this.tagStack.length > 0) {
const topEle = this.tagStack[this.tagStack.length - 1];
if (this.cursor < topEle.end) {
break;
}
this.emit('end', topEle);
this.tagStack.pop();
}
return true;
}
// user234683 notes: The matroska variable integer format is as follows:
// The first byte is where the length of the integer in bytes is determined.
// The number of bytes for the integer is equal to the number of leading
// zeroes in that first byte PLUS 1. Then there is a single 1 bit separator,
// and the rest of the bits in the first byte and the rest of the bits in
// the subsequent bytes are the value of the number. Note the 1-bit separator
// is not part of the value, but by convention IS included in the value for the
// EBML Tag IDs in the schema table above
// The byte-length includes the first byte. So one could also say the number
// of leading zeros is the number of subsequent bytes to include.
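// Worked example (hypothetical bytes) 0x42 0x86: the first byte 0b01000010
// has one leading zero, so length = 2. value drops the leading-1 marker:
// (0x42 & 0x3F)*256 + 0x86 = 0x0286 = 646. valueWithLeading1 keeps it:
// (0x42 & 0x7F)*256 + 0x86 = 0x4286, the form used for the EBML IDs in the
// schema table above.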
function readVint(buffer, start = 0) {
const length = 8 - Math.floor(Math.log2(buffer[start]));
if (start + length > buffer.length) {
return null;
}
let value = buffer[start] & ((1 << (8 - length)) - 1);
let valueWithLeading1 = buffer[start] & ((1 << (8 - length + 1)) - 1);
for (let i = 1; i < length; i += 1) {
// user234683 notes: Bails out with -1 (unknown) if the value would
// exceed 53 bits, which is the limit since JavaScript stores all
// numbers as floating points. See
// https://github.com/node-ebml/node-ebml/issues/49
if (i === 7) {
if (value >= 2 ** 8 && buffer[start + 7] > 0) {
return { length, value: -1, valueWithLeading1: -1 };
}
}
value *= 2 ** 8;
value += buffer[start + i];
valueWithLeading1 *= 2 ** 8;
valueWithLeading1 += buffer[start + i];
}
return { length, value, valueWithLeading1 };
}
// END node-ebml


@@ -0,0 +1,20 @@
function onClickReplies(e) {
var details = e.target.parentElement;
// e.preventDefault();
console.log("loading replies ..");
doXhr(details.getAttribute("src") + "&slim=1", (html) => {
var div = details.querySelector(".comment_page");
div.innerHTML = html;
});
details.removeEventListener('click', onClickReplies);
}
window.addEventListener('DOMContentLoaded', function() {
QA("details.replies").forEach(details => {
details.addEventListener('click', onClickReplies);
details.addEventListener('auxclick', (e) => {
if (e.target.parentElement !== details) return;
if (e.button == 1) window.open(details.getAttribute("src"));
});
});
});

youtube/static/js/common.js Normal file

@@ -0,0 +1,113 @@
Q = document.querySelector.bind(document);
QA = document.querySelectorAll.bind(document);
function text(msg) { return document.createTextNode(msg); }
function clearNode(node) { while (node.firstChild) node.removeChild(node.firstChild); }
function toTimestamp(seconds) {
seconds = Math.floor(seconds);
var minutes = Math.floor(seconds/60);
seconds = seconds % 60;
var hours = Math.floor(minutes/60);
minutes = minutes % 60;
if (hours) {
return `0${hours}:`.slice(-3) + `0${minutes}:`.slice(-3) + `0${seconds}`.slice(-2);
}
return `0${minutes}:`.slice(-3) + `0${seconds}`.slice(-2);
}
var cur_track_idx = 0;
function getActiveTranscriptTrackIdx() {
let textTracks = Q("video").textTracks;
if (!textTracks.length) return;
for (let i=0; i < textTracks.length; i++) {
if (textTracks[i].mode == "showing") {
cur_track_idx = i;
return cur_track_idx;
}
}
return cur_track_idx;
}
function getActiveTranscriptTrack() { return Q("video").textTracks[getActiveTranscriptTrackIdx()]; }
function getDefaultTranscriptTrackIdx() {
let textTracks = Q("video").textTracks;
return textTracks.length - 1;
}
function doXhr(url, callback=null) {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.onload = (e) => {
callback(e.currentTarget.response);
}
xhr.send();
return xhr;
}
// https://stackoverflow.com/a/30810322
function copyTextToClipboard(text) {
var textArea = document.createElement("textarea");
//
// *** This styling is an extra step which is likely not required. ***
//
// Why is it here? To ensure:
// 1. the element is able to have focus and selection.
// 2. if element was to flash render it has minimal visual impact.
// 3. less flakiness with selection and copying which **might** occur if
// the textarea element is not visible.
//
// The likelihood is the element won't even render, not even a
// flash, so some of these are just precautions. However in
// Internet Explorer the element is visible while the popup
// box asking the user for permission for the web page to
// copy to the clipboard is shown.
//
// Place in top-left corner of screen regardless of scroll position.
textArea.style.position = 'fixed';
textArea.style.top = 0;
textArea.style.left = 0;
// Ensure it has a small width and height. Setting to 1px / 1em
// doesn't work as this gives a negative w/h on some browsers.
textArea.style.width = '2em';
textArea.style.height = '2em';
// We don't need padding, reducing the size if it does flash render.
textArea.style.padding = 0;
// Clean up any borders.
textArea.style.border = 'none';
textArea.style.outline = 'none';
textArea.style.boxShadow = 'none';
// Avoid flash of white box if rendered for any reason.
textArea.style.background = 'transparent';
textArea.value = text;
let parent_el = video.parentElement;
parent_el.appendChild(textArea);
textArea.focus();
textArea.select();
try {
var successful = document.execCommand('copy');
var msg = successful ? 'successful' : 'unsuccessful';
console.log('Copying text command was ' + msg);
} catch (err) {
console.log('Oops, unable to copy');
}
parent_el.removeChild(textArea);
}
window.addEventListener('DOMContentLoaded', function() {
cur_track_idx = getDefaultTranscriptTrackIdx();
});


@@ -0,0 +1,56 @@
function onKeyDown(e) {
if (['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)) return false;
// console.log(e);
let v = Q("video");
if (!e.isTrusted) return; // plyr CustomEvent
let c = e.key.toLowerCase();
if (e.ctrlKey) return;
else if (c == "k") {
v.paused ? v.play() : v.pause();
}
else if (c == "arrowleft") {
e.preventDefault();
v.currentTime = v.currentTime - 5;
}
else if (c == "arrowright") {
e.preventDefault();
v.currentTime = v.currentTime + 5;
}
else if (c == "j") {
e.preventDefault();
v.currentTime = v.currentTime - 10;
}
else if (c == "l") {
e.preventDefault();
v.currentTime = v.currentTime + 10;
}
else if (c == "f") {
e.preventDefault();
if (data.settings.video_player == 1)
player.fullscreen.toggle()
else {
if (document.fullscreen) document.exitFullscreen();
else v.requestFullscreen();
}
}
else if (c == "c") {
e.preventDefault();
if (data.settings.video_player == 1)
player.toggleCaptions();
else {
let tt = getActiveTranscriptTrack();
if (tt == null) return;
if (tt.mode == "showing") tt.mode = "disabled";
else tt.mode = "showing";
}
}
else if (c == "t") {
let ts = Math.floor(Q("video").currentTime);
copyTextToClipboard(`https://youtu.be/${data.video_id}?t=${ts}`);
}
}
window.addEventListener('DOMContentLoaded', function() {
document.addEventListener('keydown', onKeyDown);
});


@@ -0,0 +1,145 @@
var captionsActive;
if(data.settings.subtitles_mode == 2)
captionsActive = true;
else if(data.settings.subtitles_mode == 1 && data.has_manual_captions)
captionsActive = true;
else
captionsActive = false;
var qualityOptions = [];
var qualityDefault;
for (var src of data['uni_sources']) {
qualityOptions.push(src.quality_string)
}
for (var src of data['pair_sources']) {
qualityOptions.push(src.quality_string)
}
if (data['using_pair_sources'])
qualityDefault = data['pair_sources'][data['pair_idx']].quality_string;
else if (data['uni_sources'].length != 0)
qualityDefault = data['uni_sources'][data['uni_idx']].quality_string;
else
qualityDefault = 'None';
// Fix plyr refusing to work with qualities that are strings
Object.defineProperty(Plyr.prototype, 'quality', {
set: function(input) {
const config = this.config.quality;
const options = this.options.quality;
if (!options.length) {
return;
}
// removing this line:
//let quality = [!is.empty(input) && Number(input), this.storage.get('quality'), config.selected, config.default].find(is.number);
// replacing with:
let quality = input;
let updateStorage = true;
if (!options.includes(quality)) {
// Plyr sets quality to null at startup, resulting in the erroneous
// calling of this setter function with input = null, and the
// commented out code below would set the quality to something
// unrelated at startup. Comment out and just return.
return;
/*const value = closest(options, quality);
this.debug.warn(`Unsupported quality option: ${quality}, using ${value} instead`);
quality = value; // Don't update storage if quality is not supported
updateStorage = false;*/
} // Update config
config.selected = quality; // Set quality
this.media.quality = quality; // Save to storage
if (updateStorage) {
this.storage.set({
quality
});
}
}
});
const playerOptions = {
disableContextMenu: false,
captions: {
active: captionsActive,
language: data.settings.subtitles_language,
},
controls: [
'play-large',
'play',
'progress',
'current-time',
'duration',
'mute',
'volume',
'captions',
'settings',
'fullscreen',
],
iconUrl: "/youtube.com/static/modules/plyr/plyr.svg",
blankVideo: "/youtube.com/static/modules/plyr/blank.webm",
debug: false,
storage: {enabled: false},
// disable plyr hotkeys in favor of hotkeys.js
keyboard: {
focused: false,
global: false,
},
quality: {
default: qualityDefault,
options: qualityOptions,
forced: true,
onChange: function(quality) {
if (quality == 'None')
return;
if (quality.includes('(integrated)')) {
for (var i=0; i < data['uni_sources'].length; i++) {
if (data['uni_sources'][i].quality_string == quality) {
changeQuality({'type': 'uni', 'index': i});
return;
}
}
} else {
for (var i=0; i < data['pair_sources'].length; i++) {
if (data['pair_sources'][i].quality_string == quality) {
changeQuality({'type': 'pair', 'index': i});
return;
}
}
}
},
},
previewThumbnails: {
enabled: storyboard_url != null,
src: [storyboard_url],
},
settings: ['captions', 'quality', 'speed', 'loop'],
}
// if the value set by user is -1, the volume option is omitted, as it only accepts value b/w 0 and 1
// https://github.com/sampotts/plyr#options
if (data.settings.default_volume !== -1) {
playerOptions.volume = data.settings.default_volume / 100;
}
const player = new Plyr(document.querySelector('video'), playerOptions);
// disable double click to fullscreen
// https://github.com/sampotts/plyr/issues/1370#issuecomment-528966795
player.eventListeners.forEach(function(eventListener) {
if(eventListener.type === 'dblclick') {
eventListener.element.removeEventListener(eventListener.type, eventListener.callback, eventListener.options);
}
});
// Add .started property, true after the playback has been started
// Needed so controls won't be hidden before playback has started
player.started = false;
player.once('playing', function(){this.started = true});


@@ -0,0 +1,40 @@
"use strict";
// from: https://git.gir.st/subscriptionfeed.git/blob/59a590d:/app/youtube/templates/watch.html.j2#l28
var sha256=function a(b){function c(a,b){return a>>>b|a<<32-b}for(var d,e,f=Math.pow,g=f(2,32),h="length",i="",j=[],k=8*b[h],l=a.h=a.h||[],m=a.k=a.k||[],n=m[h],o={},p=2;64>n;p++)if(!o[p]){for(d=0;313>d;d+=p)o[d]=p;l[n]=f(p,.5)*g|0,m[n++]=f(p,1/3)*g|0}for(b+="\x80";b[h]%64-56;)b+="\x00";for(d=0;d<b[h];d++){if(e=b.charCodeAt(d),e>>8)return;j[d>>2]|=e<<(3-d)%4*8}for(j[j[h]]=k/g|0,j[j[h]]=k,e=0;e<j[h];){var q=j.slice(e,e+=16),r=l;for(l=l.slice(0,8),d=0;64>d;d++){var s=q[d-15],t=q[d-2],u=l[0],v=l[4],w=l[7]+(c(v,6)^c(v,11)^c(v,25))+(v&l[5]^~v&l[6])+m[d]+(q[d]=16>d?q[d]:q[d-16]+(c(s,7)^c(s,18)^s>>>3)+q[d-7]+(c(t,17)^c(t,19)^t>>>10)|0),x=(c(u,2)^c(u,13)^c(u,22))+(u&l[1]^u&l[2]^l[1]&l[2]);l=[w+x|0].concat(l),l[4]=l[4]+w|0}for(d=0;8>d;d++)l[d]=l[d]+r[d]|0}for(d=0;8>d;d++)for(e=3;e+1;e--){var y=l[d]>>8*e&255;i+=(16>y?0:"")+y.toString(16)}return i}; /*https://geraintluff.github.io/sha256/sha256.min.js (public domain)*/
window.addEventListener("load", load_sponsorblock);
document.addEventListener('DOMContentLoaded', ()=>{
const check = document.querySelector("#skip_sponsors");
check.addEventListener("change", () => {if (check.checked) load_sponsorblock()});
});
function load_sponsorblock(){
const info_elem = Q('#skip_n');
if (info_elem.innerText.length) return; // already fetched
const hash = sha256(data.video_id).substr(0,4);
const video_obj = Q("video");
let url = `/https://sponsor.ajay.app/api/skipSegments/${hash}`;
fetch(url)
.then(response => response.json())
.then(r => {
for (const video of r) {
if (video.videoID != data.video_id) continue;
info_elem.innerText = `(${video.segments.length} segments)`;
const cat_n = video.segments.map(e=>e.category).sort()
.reduce((acc,e) => (acc[e]=(acc[e]||0)+1, acc), {});
info_elem.title = Object.entries(cat_n).map(e=>e.join(': ')).join(', ');
for (const segment of video.segments) {
const [start, stop] = segment.segment;
if (segment.category != "sponsor") continue;
video_obj.addEventListener("timeupdate", function() {
if (Q("#skip_sponsors").checked &&
this.currentTime >= start &&
this.currentTime < stop-1) {
this.currentTime = stop;
}
});
}
}
});
}


@@ -0,0 +1,151 @@
var details_tt, select_tt, table_tt;
function renderCues() {
var selectedTrack = Q("video").textTracks[select_tt.selectedIndex];
let cuesList = [...selectedTrack.cues];
var is_automatic = cuesList[0].text.startsWith(" \n");
// Firefox ignores cues starting with a blank line containing a space
// Automatic captions contain such a blank line in the first cue
let ff_bug = false;
if (!cuesList[0].text.length) { ff_bug = true; is_automatic = true };
let rows;
function forEachCue(callback) {
for (let i=0; i < cuesList.length; i++) {
let txt, startTime = selectedTrack.cues[i].startTime;
if (is_automatic) {
// Automatic captions repeat content. The new segment is displayed
// on the bottom row; the old one is displayed on the top row.
// So grab the bottom row only. Skip every other cue because the bottom
// row is empty.
if (i % 2) continue;
if (ff_bug && !selectedTrack.cues[i].text.length) {
txt = selectedTrack.cues[i+1].text;
} else {
txt = selectedTrack.cues[i].text.split('\n')[1].replace(/<[\d:.]*?><c>(.*?)<\/c>/g, "$1");
}
} else {
txt = selectedTrack.cues[i].text;
}
callback(startTime, txt);
}
}
function createTimestampLink(startTime, txt, title=null) {
const a = document.createElement("a");
a.appendChild(text(txt));
a.href = "javascript:;"; // TODO: replace this with ?t parameter
if (title) a.title = title;
a.addEventListener("click", (e) => {
Q("video").currentTime = startTime;
})
return a;
}
clearNode(table_tt);
console.log("render cues..", selectedTrack.cues.length);
if (Q("input#transcript-use-table").checked) {
forEachCue((startTime, txt) => {
let tr, td;
tr = document.createElement("tr");
td = document.createElement("td")
td.appendChild(createTimestampLink(startTime, toTimestamp(startTime)));
tr.appendChild(td);
td = document.createElement("td")
td.appendChild(text(txt));
tr.appendChild(td);
table_tt.appendChild(tr);
});
rows = table_tt.rows;
}
else {
forEachCue((startTime, txt) => {
const span = document.createElement("span");
var idx = txt.indexOf(" ", 1);
var [firstWord, rest] = [txt.slice(0, idx), txt.slice(idx)];
span.appendChild(createTimestampLink(startTime, firstWord, toTimestamp(startTime)));
if (rest) span.appendChild(text(rest + " "));
table_tt.appendChild(span);
});
rows = table_tt.childNodes;
}
var lastActiveRow = null;
function colorCurRow(e) {
// console.log("cuechange:", e);
var activeCueIdx = cuesList.findIndex((c) => c == selectedTrack.activeCues[0]);
var activeRowIdx = is_automatic ? Math.floor(activeCueIdx / 2) : activeCueIdx;
if (lastActiveRow) lastActiveRow.style.backgroundColor = "";
if (activeRowIdx < 0) return;
var row = rows[activeRowIdx];
row.style.backgroundColor = "#0cc12e42";
lastActiveRow = row;
}
colorCurRow();
selectedTrack.addEventListener("cuechange", colorCurRow);
}
function loadCues() {
let textTracks = Q("video").textTracks;
let selectedTrack = textTracks[select_tt.selectedIndex];
// See https://developer.mozilla.org/en-US/docs/Web/API/TextTrack/mode
// This code will (I think) make sure that the selected track's cues
// are loaded even if the track subtitles aren't on (showing). Setting it
// to hidden will load them.
let selected_track_target_mode = "hidden";
for (let track of textTracks) {
// Want to avoid unshowing selected track if it's showing
if (track.mode === "showing") selected_track_target_mode = "showing";
if (track !== selectedTrack) track.mode = "disabled";
}
if (selectedTrack.mode == "disabled") {
selectedTrack.mode = selected_track_target_mode;
}
var intervalID = setInterval(() => {
if (selectedTrack.cues && selectedTrack.cues.length) {
clearInterval(intervalID);
renderCues();
}
}, 100);
}
window.addEventListener('DOMContentLoaded', function() {
let textTracks = Q("video").textTracks;
if (!textTracks.length) return;
details_tt = Q("details#transcript-details");
details_tt.addEventListener("toggle", () => {
if (details_tt.open) loadCues();
});
select_tt = Q("select#select-tt");
select_tt.selectedIndex = getDefaultTranscriptTrackIdx();
select_tt.addEventListener("change", loadCues);
table_tt = Q("table#transcript-table");
table_tt.appendChild(text("loading.."));
textTracks.addEventListener("change", (e) => {
// console.log(e);
var idx = getActiveTranscriptTrackIdx(); // sadly not provided by 'e'
if (textTracks[idx].mode == "showing") {
select_tt.selectedIndex = idx;
loadCues();
}
else if (details_tt.open && textTracks[idx].mode == "disabled") {
textTracks[idx].mode = "hidden"; // so we still receive 'oncuechange'
}
})
Q("input#transcript-use-table").addEventListener("change", renderCues);
});

youtube/static/js/watch.js Normal file

@@ -0,0 +1,214 @@
var video = document.querySelector('video');
function setVideoDimensions(height, width){
var body = document.querySelector('body');
body.style.setProperty('--video_height', String(height));
body.style.setProperty('--video_width', String(width));
if (height < 240)
body.style.setProperty('--plyr-control-spacing-num', '3');
else
body.style.setProperty('--plyr-control-spacing-num', '10');
var theaterWidth = Math.max(640, data['video_duration'] || 0, width);
body.style.setProperty('--theater_video_target_width', String(theaterWidth));
// This will set the correct media query
document.querySelector('#video-container').className = 'h' + height;
}
function changeQuality(selection) {
var currentVideoTime = video.currentTime;
var videoPaused = video.paused;
var videoSpeed = video.playbackRate;
var srcInfo;
if (avMerge)
avMerge.close();
if (selection.type == 'uni'){
srcInfo = data['uni_sources'][selection.index];
video.src = srcInfo.url;
} else {
srcInfo = data['pair_sources'][selection.index];
avMerge = new AVMerge(video, srcInfo, currentVideoTime);
}
setVideoDimensions(srcInfo.height, srcInfo.width);
video.currentTime = currentVideoTime;
if (!videoPaused){
video.play();
}
video.playbackRate = videoSpeed;
}
// Initialize av-merge
var avMerge;
if (data.using_pair_sources) {
var srcPair = data['pair_sources'][data['pair_idx']];
avMerge = new AVMerge(video, srcPair, 0);
}
// Quality selector
var qualitySelector = document.querySelector('#quality-select')
if (qualitySelector)
qualitySelector.addEventListener(
'change', function(e) {
changeQuality(JSON.parse(this.value))
}
);
// Set up video start time from &t parameter
if (data.time_start != 0 && video)
video.currentTime = data.time_start;
// External video speed control
var speedInput = document.querySelector('#speed-control');
speedInput.addEventListener('keyup', (event) => {
if (event.key === 'Enter') {
var speed = parseFloat(speedInput.value);
if(!isNaN(speed)){
video.playbackRate = speed;
}
}
});
// Playlist lazy image loading
if (data.playlist && data.playlist['id'] !== null) {
// lazy load playlist images
// copied almost verbatim from
// https://css-tricks.com/tips-for-rolling-your-own-lazy-loading/
// IntersectionObserver isn't supported in pre-quantum
// firefox versions, but the alternative of making it
// manually is a performance drain, so oh well
var observer = new IntersectionObserver(lazyLoad, {
// where in relation to the edge of the viewport, we are observing
rootMargin: "100px",
// how much of the element needs to have intersected
// in order to fire our loading function
threshold: 1.0
});
function lazyLoad(elements) {
elements.forEach(item => {
if (item.intersectionRatio > 0) {
// set the src attribute to trigger a load
item.target.src = item.target.dataset.src;
// stop observing this element. Our work here is done!
observer.unobserve(item.target);
};
});
};
// Tell our observer to observe all img elements with a "lazy" class
var lazyImages = document.querySelectorAll('img.lazy');
lazyImages.forEach(img => {
observer.observe(img);
});
}
// Autoplay
if (data.settings.related_videos_mode !== 0 || data.playlist !== null) {
let playability_error = !!data.playability_error;
let isPlaylist = false;
if (data.playlist !== null && data.playlist['current_index'] !== null)
isPlaylist = true;
// read cookies on whether to autoplay
// https://developer.mozilla.org/en-US/docs/Web/API/Document/cookie
let cookieValue;
let playlist_id;
if (isPlaylist) {
// from https://stackoverflow.com/a/6969486
function escapeRegExp(string) {
// $& means the whole matched string
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
playlist_id = data.playlist['id'];
playlist_id = escapeRegExp(playlist_id);
cookieValue = document.cookie.replace(new RegExp(
'(?:(?:^|.*;\\s*)autoplay_'
+ playlist_id + '\\s*\\=\\s*([^;]*).*$)|^.*$'
), '$1');
} else {
cookieValue = document.cookie.replace(new RegExp(
'(?:(?:^|.*;\\s*)autoplay\\s*\\=\\s*([^;]*).*$)|^.*$'
),'$1');
}
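// e.g. if document.cookie is "autoplay=1; some_other=x", the first regex
// alternative matches and the replacement keeps capture group $1, giving
// cookieValue == "1"; if no autoplay cookie is set, the ^.*$ alternative
// matches with an empty capture, so cookieValue is "".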
let autoplayEnabled = 0;
if(cookieValue.length === 0){
autoplayEnabled = 0;
} else {
autoplayEnabled = Number(cookieValue);
}
// check the checkbox if autoplay is on
let checkbox = document.querySelector('#autoplay-toggle');
if(autoplayEnabled){
checkbox.checked = true;
}
// listen for checkbox to turn autoplay on and off
let cookie = 'autoplay'
if (isPlaylist)
cookie += '_' + playlist_id;
checkbox.addEventListener( 'change', function() {
if(this.checked) {
autoplayEnabled = 1;
document.cookie = cookie + '=1; SameSite=Strict';
} else {
autoplayEnabled = 0;
document.cookie = cookie + '=0; SameSite=Strict';
}
});
if(!playability_error){
// play the video if autoplay is on
if(autoplayEnabled){
video.play();
}
}
// determine next video url
let nextVideoUrl;
if (isPlaylist) {
let currentIndex = data.playlist['current_index'];
if (data.playlist['current_index']+1 == data.playlist['items'].length)
nextVideoUrl = null;
else
nextVideoUrl = data.playlist['items'][data.playlist['current_index']+1]['url'];
// scroll playlist to proper position
// item height + gap == 100
let pl = document.querySelector('.playlist-videos');
pl.scrollTop = 100*currentIndex;
} else {
if (data.related.length === 0)
nextVideoUrl = null;
else
nextVideoUrl = data.related[0]['url'];
}
let nextVideoDelay = 1000;
// go to next video when video ends
// https://stackoverflow.com/a/2880950
if (nextVideoUrl) {
if(playability_error){
videoEnded();
} else {
video.addEventListener('ended', videoEnded, false);
}
function nextVideo(){
if(autoplayEnabled){
window.location.href = nextVideoUrl;
}
}
function videoEnded(e) {
window.setTimeout(nextVideo, nextVideoDelay);
}
}
}


@@ -0,0 +1,21 @@
body{
--interface-color: #ffffff;
--text-color: #222222;
--background-color: #f8f8f8;
--video-background-color: #ffffff;
--link-color-rgb: 0, 0, 238;
--visited-link-color-rgb: 85, 26, 139;
}
.comment .permalink{
color: #000000;
}
.setting-item{
background-color: #f8f8f8;
}
.muted{
background-color: #888888;
}

Binary file not shown.


@@ -0,0 +1,23 @@
# Build steps for Plyr (3.6.8)
Tested on Debian.
First install yarn (Javascript package manager). Instructions [here](https://classic.yarnpkg.com/en/docs/install/).
Clone the repo to a location of your choosing:
```
git clone https://github.com/sampotts/plyr.git
cd plyr
```
Install Plyr's dependencies:
```
yarn install
```
Build with gulp (which was hopefully installed by yarn):
```
gulp build
```
plyr.js and other files will be in the `dist` directory.

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long



@@ -0,0 +1,59 @@
body{
--plyr-control-spacing: calc(var(--plyr-control-spacing-num)*1px);
--plyr-video-controls-background: rgba(0,0,0,0.8);
}
/* Scale captions with video height, not page width. Scale down to a minimum
of 10px so it does not become unreadable, rather than scaling
exactly proportional to video height */
.plyr__captions {
font-size: calc(18px + 8px*(var(--video_height) - 720)/720) !important;
}
/* make buffered progress more visible */
.plyr--video .plyr__progress__buffer{
color: rgba(255,255,255,0.75) !important;
}
/* Avoid visual jumps and flashes as plyr loads */
.plyr audio, .plyr iframe, .plyr video{
width: 100% !important;
height: 100% !important;
}
.plyr__video-wrapper{
height: 100% !important;
width: 100% !important;
}
/* Prevent this div from blocking right-click menu for video
e.g. Firefox playback speed options */
.plyr__poster{
display: none !important;
}
/* Get rid of obnoxiously high padding on controls bar */
.plyr__controls{
padding-top: 4px !important;
padding-bottom: 4px !important;
}
.plyr__captions{
pointer-events: none;
}
/* For menus without a button above them - make them scroll if
they are too high for the player*/
.plyr div[role="menu"]{
overflow-y: scroll;
max-height: calc(var(--video_height)*1px - 10px - 40px);
}
/* For menus with a button above them */
.plyr button + div[role="menu"]{
overflow-y: scroll;
/* Subtract margin between controls and menu, and controls height,
and height of top menu button*/
max-height: calc(var(--video_height)*1px - 10px - 40px - 42px*var(--plyr-control-spacing-num)/10);
}