Commit

refactor: ♻️cleanup logs
Banou26 committed Jan 28, 2024
1 parent a12f95a commit a766b39
Showing 3 changed files with 14 additions and 37 deletions.
6 changes: 2 additions & 4 deletions src/main.cpp
@@ -526,7 +526,7 @@ extern "C" {
av_packet_rescale_ts(packet, in_stream->time_base, out_stream->time_base);

if (in_stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
printf("pts: %f, prev duration: %f, duration: %f\n", packet->pts * av_q2d(out_stream->time_base), duration, packet->duration * av_q2d(out_stream->time_base));
// printf("pts: %f, prev duration: %f, duration: %f\n", packet->pts * av_q2d(out_stream->time_base), duration, packet->duration * av_q2d(out_stream->time_base));
duration += packet->duration * av_q2d(out_stream->time_base);
}

@@ -550,7 +550,7 @@ extern "C" {

prev_duration = duration;
prev_pts = pts;
printf("pts: %f, duration: %f\n", prev_pts, duration);
// printf("pts: %f, duration: %f\n", prev_pts, duration);
prev_pos = pos;

duration = 0;
@@ -565,7 +565,6 @@ extern "C" {
continue;
}

printf("flush: %d %d %d\n", is_flushing, flushed, empty_flush);
if (is_flushing && empty_flush) {
empty_flush = flush(
to_string(input_format_context->pb->pos),
@@ -731,7 +730,6 @@ extern "C" {

// Write callback called by AVIOContext
static int writeFunction(void* opaque, uint8_t* buf, int buf_size) {
printf("writeFunction\n");
Remuxer &remuxObject = *reinterpret_cast<Remuxer*>(opaque);

if (remuxObject.initializing && !remuxObject.first_init) {
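The values these disabled printf calls were tracing are plain time-base conversions: av_q2d(q) returns q.num / q.den, so pts * av_q2d(time_base) is the presentation time in seconds, and duration accumulates fragment length the same way. A minimal TypeScript sketch of that arithmetic, for reference only (the Rational type and toSeconds helper below are illustrative, not part of this codebase):

// Illustrative only: mirrors FFmpeg's av_q2d(), which returns num / den.
type Rational = { num: number; den: number }

const toSeconds = (ticks: number, timeBase: Rational): number =>
  ticks * (timeBase.num / timeBase.den)

// A packet with pts = 90_000 in a 1/90000 time base starts at 1.0 s;
// a duration of 3_600 ticks is 0.04 s (one frame at 25 fps).
const timeBase: Rational = { num: 1, den: 90_000 }
console.log(toSeconds(90_000, timeBase)) // 1
console.log(toSeconds(3_600, timeBase))  // 0.04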
38 changes: 12 additions & 26 deletions src/test.ts
@@ -13,8 +13,8 @@ type Chunk = {
}

const BUFFER_SIZE = 2_500_000
- const VIDEO_URL = '../video2.mkv'
- // const VIDEO_URL = '../spidey.mkv'
+ // const VIDEO_URL = '../video5.mkv'
+ const VIDEO_URL = '../spidey.mkv'

export default async function saveFile(plaintext: ArrayBuffer, fileName: string, fileType: string) {
return new Promise((resolve, reject) => {
@@ -169,11 +169,11 @@ fetch(VIDEO_URL, { headers: { Range: `bytes=0-1` } })
'waiting'
]

- for (const event of allVideoEvents) {
- video.addEventListener(event, ev => {
- console.log('video event', event, ev)
- })
- }
+ // for (const event of allVideoEvents) {
+ // video.addEventListener(event, ev => {
+ // console.log('video event', event, ev)
+ // })
+ // }

const seconds = document.createElement('div')
video.controls = true
@@ -253,20 +253,11 @@ fetch(VIDEO_URL, { headers: { Range: `bytes=0-1` } })
end: sourceBuffer.buffered.end(index)
}))

- // video.addEventListener('timeupdate', () => {
- // seconds.textContent = video.currentTime.toString()
- // })

video.addEventListener('canplaythrough', () => {
video.playbackRate = 1
video.play()
}, { once: true })

- const logAndAppend = async (chunk: Chunk) => {
- console.log('res', chunk)
- await appendBuffer(chunk.buffer)
- }

let chunks: Chunk[] = []

const PREVIOUS_BUFFER_COUNT = 1
@@ -275,9 +266,7 @@
await appendBuffer(headerChunk.buffer)

const pull = async () => {
- console.log('read')
const chunk = await remuxer.read()
- console.log('read', chunk)
chunks = [...chunks, chunk]
return chunk
}
@@ -335,12 +324,9 @@ fetch(VIDEO_URL, { headers: { Range: `bytes=0-1` } })
await appendBuffer(chunk1.buffer)
seeking = false
await updateBuffers()
- console.log('seek time', performance.now() - p)
})

- console.log('pulling first chunk')
const firstChunk = await pull()
- console.log('first chunk', firstChunk)
appendBuffer(firstChunk.buffer)

video.addEventListener('timeupdate', () => {
@@ -368,10 +354,10 @@ fetch(VIDEO_URL, { headers: { Range: `bytes=0-1` } })

setTimeout(async () => {
// await video.pause()
- video.currentTime = 587.618314
- await new Promise(resolve => setTimeout(resolve, 1000))
+ // video.currentTime = 587.618314
+ // await new Promise(resolve => setTimeout(resolve, 1000))
// video.playbackRate = 5
- video.currentTime = 400
+ // video.currentTime = 400
// await new Promise(resolve => setTimeout(resolve, 1000))
// video.currentTime = 300
// await new Promise(resolve => setTimeout(resolve, 1000))
@@ -380,8 +366,8 @@ fetch(VIDEO_URL, { headers: { Range: `bytes=0-1` } })
// video.currentTime = 600
// await new Promise(resolve => setTimeout(resolve, 1000))
// video.currentTime = 300
- await new Promise(resolve => setTimeout(resolve, 1000))
- video.currentTime = 534.953306
+ // await new Promise(resolve => setTimeout(resolve, 1000))
+ // video.currentTime = 534.953306
// await new Promise(resolve => setTimeout(resolve, 1000))
// video.currentTime = 100
}, 1000)
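For orientation, the test harness above follows the usual Media Source Extensions flow: append the remuxer's header chunk as the init segment, then append fragments as they are pulled. A stripped-down sketch of that flow, assuming stand-ins for the project's header chunk, pull() and codec string (none of these names are the project's exact API):

const playRemuxed = (
  video: HTMLVideoElement,
  mime: string, // e.g. 'video/mp4; codecs="..."'; the real string comes from the remuxer's stream info
  getHeader: () => Promise<{ buffer: ArrayBuffer }>,
  pull: () => Promise<{ buffer: ArrayBuffer }>
) => {
  const mediaSource = new MediaSource()
  video.src = URL.createObjectURL(mediaSource)

  // appendBuffer is asynchronous; wait for 'updateend' before the next append.
  const appendOnce = (sourceBuffer: SourceBuffer, data: ArrayBuffer) =>
    new Promise<void>((resolve, reject) => {
      sourceBuffer.addEventListener('updateend', () => resolve(), { once: true })
      sourceBuffer.addEventListener('error', () => reject(new Error('append failed')), { once: true })
      sourceBuffer.appendBuffer(data)
    })

  mediaSource.addEventListener('sourceopen', async () => {
    const sourceBuffer = mediaSource.addSourceBuffer(mime)
    const header = await getHeader()
    await appendOnce(sourceBuffer, header.buffer) // init segment goes in first
    for (let i = 0; i < 5; i++) {                 // a real player throttles on video.buffered instead
      const chunk = await pull()
      await appendOnce(sourceBuffer, chunk.buffer)
    }
  }, { once: true })
}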
7 changes: 0 additions & 7 deletions src/worker/index.ts
@@ -60,22 +60,18 @@ const init = makeCallListener(async (
},
streamRead: async (_offset: string) => {
const offset = Number(_offset)
- console.log('worker streamRead', offset)
const res = await streamRead(offset)
- console.log('worker streamRead end', offset, res)
return {
...res,
value: new Uint8Array(res.value)

[GitHub Actions / build check failure on line 66 in src/worker/index.ts: No overload matches this call.]
}
},
clearStream: () => clearStream(),
randomRead: async (offset: number, bufferSize: number) => {
- console.log('worker randomRead', offset, bufferSize)
const buffer = await randomRead(offset, bufferSize)
return buffer
},
write: async (buffer: Uint8Array) => {
- console.log('worker write', buffer.byteLength)
const newBuffer = new Uint8Array(writeBuffer.byteLength + buffer.byteLength)
newBuffer.set(writeBuffer)
newBuffer.set(new Uint8Array(buffer), writeBuffer.byteLength)
@@ -87,7 +83,6 @@ const init = makeCallListener(async (
) => {
const offset = Number(_offset)
const position = Number(_position)
- console.log('worker flush', writeBuffer.byteLength)
if (!writeBuffer.byteLength) return true
readResultPromiseResolve({
isHeader: false,
@@ -125,13 +120,11 @@ const init = makeCallListener(async (
},
seek: (timestamp: number) => remuxer.seek(timestamp),
read: () => {
- console.log('worker read')
readResultPromise = new Promise<Chunk>((resolve, reject) => {
readResultPromiseResolve = resolve
readResultPromiseReject = reject
})
remuxer.read()
- console.log('worker read end')
return readResultPromise
},
getInfo: () => remuxer.getInfo()
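The read()/flush() pair in this worker relies on a deferred promise: read() stores the resolver, kicks the native remuxer, and the flush callback later resolves it with the buffered output. A self-contained sketch of that pattern (names simplified; this is not the worker's exact API):

type Chunk = { isHeader: boolean; offset: number; buffer: Uint8Array }

const makeReader = (startRead: () => void) => {
  let resolveRead: ((chunk: Chunk) => void) | undefined
  let rejectRead: ((error: unknown) => void) | undefined

  return {
    // Caller side: returns a promise that settles once a fragment is flushed.
    read: (): Promise<Chunk> => {
      const pending = new Promise<Chunk>((resolve, reject) => {
        resolveRead = resolve
        rejectRead = reject
      })
      startRead() // drives the remuxer, which eventually calls back into flush()
      return pending
    },
    // Remuxer side: called with the accumulated output buffer.
    flush: (chunk: Chunk) => resolveRead?.(chunk),
    fail: (error: unknown) => rejectRead?.(error)
  }
}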
