Skip to content

Commit

Permalink
Fixed being unable to stream uncached media below the minimum stream quality
Browse files Browse the repository at this point in the history
  • Loading branch information
SimplyBoo committed Mar 3, 2020
1 parent f2def52 commit a517847
Show file tree
Hide file tree
Showing 6 changed files with 67 additions and 41 deletions.
51 changes: 51 additions & 0 deletions server/src/cache/hash.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import Crypto from 'crypto';
import FS from 'fs';
import Util from 'util';

// Bytes to read from start and end of file.
const CHUNK_SIZE = 64 * 1024;

// Promisified fs primitives, hoisted so they are built once per module load
// rather than on every createHash() call.
const openAsync = Util.promisify(FS.open);
const readAsync = Util.promisify(FS.read);
const statAsync = Util.promisify(FS.stat);
const closeAsync = Util.promisify(FS.close);

/**
 * Computes a SubDB-compatible content hash for the file at `path`:
 * the MD5 of the first CHUNK_SIZE bytes concatenated with the last
 * CHUNK_SIZE bytes. http://thesubdb.com/api/
 *
 * Files of CHUNK_SIZE bytes or smaller hash only their actual content
 * (the end chunk is skipped entirely when `size - CHUNK_SIZE <= 0`).
 * NOTE(review): the original implementation had a `buffer.copy` in that
 * branch, but it never extended `total`, so it had no effect on the
 * digest — it is omitted here; output is unchanged for all file sizes.
 *
 * @param path Filesystem path of the file to hash.
 * @returns Lowercase hex MD5 digest.
 * @throws Propagates fs errors (e.g. ENOENT); the descriptor is always
 *         closed, even when a read or stat fails.
 */
export async function createHash(path: string): Promise<string> {
  const fd = await openAsync(path, 'r');
  const buffer = Buffer.alloc(CHUNK_SIZE * 2);
  try {
    // Read the head chunk and stat the file concurrently; the stat tells
    // us where (and whether) the tail chunk starts.
    const [startReadResult, statResult] = await Promise.all([
      readAsync(fd, buffer, 0, CHUNK_SIZE, 0),
      statAsync(path),
    ]);

    let total = startReadResult.bytesRead;
    const endStart = statResult.size - CHUNK_SIZE;
    if (endStart > 0) {
      // Append the tail chunk directly after the head bytes. For files
      // between CHUNK_SIZE and 2*CHUNK_SIZE the two reads overlap on
      // disk; that matches the original behaviour and the SubDB scheme.
      const endReadResult = await readAsync(
        fd,
        buffer,
        startReadResult.bytesRead,
        CHUNK_SIZE,
        endStart,
      );
      total += endReadResult.bytesRead;
    }

    const hash = Crypto.createHash('md5');
    hash.update(buffer.slice(0, total));
    return hash.digest().toString('hex');
  } finally {
    // Always release the descriptor, including on read/stat failure
    // (the original leaked the fd on any error before close).
    await closeAsync(fd);
  }
}

// CLI entry point: when run directly (not imported), hash the file named
// by the first command-line argument and print the digest.
if (require.main === module) {
  const target = process.argv[2];
  if (!target) {
    throw new Error('No file specified');
  }

  console.log(`Hashing ${target}...`);
  createHash(target)
    .then(digest => {
      console.log(digest);
    })
    .catch(error => {
      // Surface the failure and exit non-zero so shell callers can detect it.
      console.error(error);
      process.exit(1);
    });
}
38 changes: 5 additions & 33 deletions server/src/cache/import-utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import ChildProcess from 'child_process';
import Crypto from 'crypto';
import FS from 'fs';
import GM from 'gm';
import Path from 'path';
Expand All @@ -10,9 +9,6 @@ import Util from 'util';
import { BaseMedia, Media, MediaType, SegmentMetadata } from '../types';
import Config from '../config';

// Bytes to read from start and end of file.
const CHUNK_SIZE = 64 * 1024;

export interface Quality {
quality: number;
copy: boolean;
Expand All @@ -27,35 +23,6 @@ export interface LoadedImage {
const LOW_PRIORITY = 15;

export class ImportUtils {
public static async hash(path: string): Promise<string> {
const fd = await Util.promisify(FS.open)(path, 'r');
const buffer = Buffer.alloc(CHUNK_SIZE * 2);
const [startReadResult, statResult] = await Promise.all([
Util.promisify(FS.read)(fd, buffer, 0, CHUNK_SIZE, 0),
Util.promisify(FS.stat)(path),
]);

let total = startReadResult.bytesRead;
const endStart = statResult.size - CHUNK_SIZE;
if (endStart <= 0) {
buffer.copy(buffer, startReadResult.bytesRead, 0);
} else {
const endReadResult = await Util.promisify(FS.read)(
fd,
buffer,
startReadResult.bytesRead,
CHUNK_SIZE,
endStart,
);
total += endReadResult.bytesRead;
}
await Util.promisify(FS.close)(fd);

const hash = Crypto.createHash('md5');
hash.update(buffer.slice(0, total));
return hash.digest().toString('hex');
}

public static async getFileCreationTime(path: string): Promise<number> {
const stat = await Util.promisify(FS.stat)(path);
if (!stat) {
Expand Down Expand Up @@ -296,6 +263,11 @@ export class ImportUtils {
new Set([...streamQualities, ...(media.metadata.qualityCache || [])]),
).sort();

// If it's less than the minimum stream quality and not cached.
if (qualities.length === 0) {
qualities.push(media.metadata.height);
}

let data = '#EXTM3U';
for (const quality of qualities.sort()) {
// Can't round because otherwise they come out as needing the same bandwidth.
Expand Down
3 changes: 2 additions & 1 deletion server/src/tasks/indexer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import Util from 'util';
import { Database, Media, Metadata, RouterTask, TaskRunnerCallback } from '../types';
import { ImportUtils } from '../cache/import-utils';
import { Scanner } from './scanner';
import { createHash } from '../cache/hash';
import Config from '../config';

export class Indexer {
Expand Down Expand Up @@ -82,7 +83,7 @@ export class Indexer {
const type = ImportUtils.getType(file);

return {
hash: await ImportUtils.hash(absolutePath),
hash: await createHash(absolutePath),
metadata: {
...(type === 'video'
? await Indexer.getVideoMetadata(absolutePath)
Expand Down
9 changes: 5 additions & 4 deletions server/src/tasks/keyframe-generator.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { Database, RouterTask, TaskRunnerCallback } from '../types';
import { ImportUtils } from '../cache/import-utils';
import Config from '../config';

export class KeyframeGenerator {
public static getTask(database: Database): RouterTask {
Expand All @@ -25,12 +26,12 @@ export class KeyframeGenerator {

let generateSegments = !media.metadata.segments;
if (generateSegments && media.metadata.qualityCache) {
const desired = ImportUtils.getMediaDesiredQualities(media);
const streamQualities = Config.get().transcoder.streamQualities;
let hasAll = true;
// Check if it's cached at every desired quality, if it is then don't
// Check if it's cached at every streaming quality, if it is then don't
// bother precaching.
for (const quality of desired) {
if (!media.metadata.qualityCache.includes(quality.quality)) {
for (const quality of streamQualities) {
if (!media.metadata.qualityCache.includes(quality)) {
hasAll = false;
break;
}
Expand Down
4 changes: 2 additions & 2 deletions server/src/tasks/rehash.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import Path from 'path';
import Util from 'util';

import { Database, RouterTask, TaskRunnerCallback } from '../types';
import { ImportUtils } from '../cache/import-utils';
import { createHash } from '../cache/hash';
import Config from '../config';

export class RehashTask {
Expand All @@ -19,7 +19,7 @@ export class RehashTask {
console.warn(`Couldn't find media to rehash: ${files[i]}`);
continue;
}
const hash = await ImportUtils.hash(media.absolutePath);
const hash = await createHash(media.absolutePath);
if (hash !== media.hash) {
console.warn(`Hash changed for ${media.absolutePath}`);
// Rename the video cache folder, if it's a video.
Expand Down
3 changes: 2 additions & 1 deletion server/src/utils/import-json.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import Util from 'util';
import { BaseMedia, Database, DumpFile } from '../types';
import { ImportUtils } from '../cache/import-utils';
import { Indexer } from '../tasks/indexer';
import { createHash } from '../cache/hash';
import { setup as setupDb } from '../database';
import Config from '../config';

Expand Down Expand Up @@ -53,7 +54,7 @@ async function importMedia(db: Database, media: BaseMedia, version?: number): Pr

if (version === undefined || version === 3) {
// In older versions the hashing mechanism was different, rehash the file.
const hash = await ImportUtils.hash(Path.resolve(libraryDir, media.path));
const hash = await createHash(Path.resolve(libraryDir, media.path));

media.metadata.qualityCache = [media.metadata.height];
if (media.type === 'video') {
Expand Down

0 comments on commit a517847

Please sign in to comment.