Dominic Szablewski hace 8 años
padre
commit
7bf420fd0c
Se han modificado 23 ficheros con 4371 adiciones y 2890 borrados
  1. 1 1
      LICENSE
  2. 178 103
      README.md
  3. 17 0
      build.sh
  4. 3 0
      jsmpeg.min.js
  5. 0 2668
      jsmpg.js
  6. 122 0
      src/ajax-progressive.js
  7. 61 0
      src/ajax.js
  8. 196 0
      src/buffer.js
  9. 115 0
      src/canvas2d.js
  10. 96 0
      src/decoder.js
  11. 94 0
      src/jsmpeg.js
  12. 682 0
      src/mp2.js
  13. 1674 0
      src/mpeg1.js
  14. 243 0
      src/player.js
  15. 190 0
      src/ts.js
  16. 161 0
      src/video-element.js
  17. 131 0
      src/webaudio.js
  18. 230 0
      src/webgl.js
  19. 66 0
      src/websocket.js
  20. 0 42
      stream-example.html
  21. 0 76
      stream-server.js
  22. 22 0
      view-stream.html
  23. 89 0
      websocket-relay.js

+ 1 - 1
LICENSE Ver fichero

@@ -1,5 +1,5 @@
1 1
 The MIT License (MIT)
2
-Copyright (c) 2016 Dominic Szablewski
2
+Copyright (c) 2017 Dominic Szablewski
3 3
 
4 4
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
5 5
 

+ 178 - 103
README.md Ver fichero

@@ -1,161 +1,236 @@
1
-# jsmpeg
1
+# JSMpeg – MPEG1 Video & MP2 Audio Decoder in JavaScript
2 2
 
3
-#### An MPEG1 Video Decoder in JavaScript ####
3
+JSMpeg is a Video Player written in JavaScript. It consists of an MPEG-TS demuxer, MPEG1 video & MP2 audio decoders, WebGL & Canvas2D renderers and WebAudio sound output. JSMpeg can load static videos via Ajax and allows low latency streaming (~75ms) via WebSockets.
4 4
 
5
-jsmpeg is a MPEG1 Decoder, written in JavaScript. It's "hand ported", i.e. not compiled with
6
-emscripten or similar. This will probably make it obsolete with the advent of asmjs.
5
+JSMpeg can decode 720p Video at 30fps on an iPhone 5S, works in any modern browser (Chrome, Firefox, Safari, Edge) and comes in at just 20kb gzipped.
7 6
 
8
-Some demos and more info: [phoboslab.org/log/2013/05/mpeg1-video-decoder-in-javascript](http://www.phoboslab.org/log/2013/05/mpeg1-video-decoder-in-javascript)
7
+Using it can be as simple as this:
8
+```html
9
+<script src="jsmpeg.min.js"></script>
10
+<div class="jsmpeg" data-url="video.ts"></div>
11
+```
9 12
 
13
+Some more info and demos: [jsmpeg.com](http://jsmpeg.com/)
10 14
 
11
-## API ##
12 15
 
16
+## Usage
13 17
 
14
-### Constructor ###
18
+A JSMpeg video player can either be created in HTML using the CSS class `jsmpeg` for the container:
15 19
 
16
-`var player = new jsmpeg(file [, options])`
20
+```html
21
+<div class="jsmpeg" data-url="<url>"></div>
22
+```
17 23
 
18
-The `file` argument accepts a URL to a .mpg file or a (yet unconnected) WebSocket instance for streaming playback.
24
+or by directly calling the `JSMpeg.Player()` constructor in JavaScript:
19 25
 
20
-The `options` argument to the `jsmpeg()` supports the following properties:
26
+```javascript
27
+var player = new JSMpeg.Player(url [, options]);
28
+```
21 29
 
22
-- `benchmark` whether to log benchmark results to the browser's console
23
-- `progressive` whether to start playback as soon as the first chunks have been loaded. Uses HTTP range-requests. Default `false`.
24
-- `progressiveThrottled` whether to throttle downloading chunks until they're needed for playback. Requires `progressive`; default `false`.
25
-- `canvas` the HTML Canvas element to use; jsmpeg will create its own Canvas element if none is provided
26
-- `autoplay` whether playback should start automatically after loading
27
-- `loop` whether playback is looped
28
-- `seekable` whether a seek-index is build during load time; neccessary for `seekToFrame` and `seekToTime` methods
29
-- `preserveDrawingBuffer` whether the WebGL context is created with preserveDrawingBuffer - necessary for "screenshots" via `canvas.toDataURL()`; default `false`.
30
-- `onload` a function that's called once, after the .mpg file has been completely loaded
31
-- `ondecodeframe` a function that's called after every frame that's decoded and rendered to the canvas
32
-- `onfinished` a function that's called when playback ends
30
+Note that using the HTML Element (internally `JSMpeg.VideoElement`) provides some features on top of `JSMpeg.Player`. Namely a SVG pause/play button and the ability to "unlock" audio on iOS devices.
33 31
 
32
+The `url` argument accepts a URL to an MPEG .ts file or a WebSocket server (ws://...).
34 33
 
35
-### Methods ###
34
+The `options` argument supports the following properties:
36 35
 
37
-- `play()` begin playback
38
-- `pause()` pause playback
39
-- `stop()` stop playback and revert play position to the beginning
40
-- `seekToFrame(frame)` seek to the specified frame (Number)
41
-- `seekToTime(seconds)` seek to the specified time (Number)
42
-- `nextFrame()` if playback is paused, decode and render the next frame; returns then HTML Canvas element
36
+- `canvas` – the HTML Canvas element to use for video rendering. If none is given, the renderer will create its own Canvas element.
37
+- `loop` – whether to loop the video (static files only). Default `true`.
38
+- `autoplay` - whether to start playing immediately (static files only). Default `false`.
39
+- `audio` - whether to decode audio. Default `true`.
40
+- `video` - whether to decode video. Default `true`.
41
+- `poster` – URL to an image to use as the poster to show before the video plays.
42
+- `pauseWhenHidden` – whether to pause playback when the tab is inactive. Default `true`. Note that browsers usually throttle JS in inactive tabs anyway.
43
+- `disableGl` - whether to disable WebGL and always use the Canvas2D renderer. Default `false`.
44
+- `preserveDrawingBuffer` – whether the WebGL context is created with `preserveDrawingBuffer` - necessary for "screenshots" via `canvas.toDataURL()`. Default `false`.
45
+- `progressive` - whether to load data in chunks (static files only). When enabled, playback can begin before the whole source has been completely loaded. Default `true`.
46
+- `throttled` - when using `progressive`, whether to defer loading chunks when they're not needed for playback yet. Default `true`
47
+- `chunkSize` - when using `progressive`, the chunk size in bytes to load at a time. Default `1024*1024` (1mb).
48
+- `decodeFirstFrame` - whether to decode and display the first frame of the video. Useful to set up the Canvas size and use the frame as the "poster" image. This has no effect when using `autoplay` or streaming sources. Default `true`.
49
+- `maxAudioLag` – when streaming, the maximum enqueued audio length in seconds.
50
+- `videoBufferSize` – when streaming, size in bytes for the video decode buffer. Default 512*1024 (512kb). You may have to increase this for very high bitrates.
51
+- `audioBufferSize` – when streaming, size in bytes for the audio decode buffer. Default 128*1024 (128kb). You may have to increase this for very high bitrates.
43 52
 
44
-When live streaming, jsmpeg supports the following methods for recording the stream clientside
45
-- `canRecord()` returns `true` when streaming has started and recording can begin, `false` otherwise
46
-- `startRecording(callback)` attempts to start recording, calls the optional callback when recording started - usually when the next intraframe was received
47
-- `stopRecording()` stops recording and returns a `Blob` with the recorded .mpg data
53
+All options except from `canvas` can also be used with the HTML Element through `data-` attributes. E.g. to specify looping and autoplay in JavaScript:
48 54
 
55
+```javascript
56
+var player = new JSMpeg.Player('video.ts', {loop: true, autoplay: true});
57
+```
49 58
 
59
+or HTML
60
+```html
61
+<div class="jsmpeg" data-url="video.ts" 
62
+	data-loop="true" data-autoplay="true"></div>
63
+```
50 64
 
51
-## Usage Examples ##
65
+Note that `camelCased` options have to be hyphenated when used as data attributes. E.g. `decodeFirstFrame: true` becomes `data-decode-first-frame="true"` for the HTML element.
52 66
 
53
-```javascript
54
-// Synopsis: var player = new jsmpeg(urlToFile, options);
55
-// The 'options' argument and all of its properties is optional. If no canvas element 
56
-// is given, jsmpeg will create its own, to be accessed at .canvas
57 67
 
58
-// Example:
59
-var canvas = document.getElementById('videoCanvas');
60
-var player = new jsmpeg('file.mpeg', {canvas: canvas, autoplay: true, loop: true});
68
+## JSMpeg.Player API
61 69
 
62
-player.pause();
63
-player.play();
64
-player.stop();
70
+A `JSMpeg.Player` instance supports the following methods and properties:
65 71
 
72
+- `.play()` – start playback
73
+- `.pause()` – pause playback
74
+- `.stop()` – stop playback and seek to the beginning
75
+- `.volume` – get or set the audio volume (0-1)
76
+- `.currentTime` – get or set the current playback position in seconds
66 77
 
67
-// If you pass 'seekable: true' in the options, you can seek to a specific frame
68
-// or time in the video.
69 78
 
70
-var player = new jsmpeg('file.mpeg', {canvas: canvas, seekable: true});
79
+## Encoding Video/Audio for JSMpeg
71 80
 
72
-player.seekToFrame(1200); // Seek to intra frame before frame 1200
73
-player.seekToTime(20); // Seek to intra frame before 20sec
81
+JSMpeg only supports playback of MPEG-TS containers with the MPEG1 Video Codec and the MP2 Audio Codec. The Video Decoder does not handle B-Frames correctly (though no modern encoder seems to use these by default anyway) and the width of the video has to be a multiple of 2.
74 82
 
75
-// seekToFrame() and seekToTime() only seek to the closest, previous intra frame by
76
-// default. If you want to seek to the exact frame or time, pass 'true' as second
77
-// parameter.
78
-// Depending on the input video, this can be potentially slow, as jsmpeg has
79
-// to decode all frames between the previous intra frame and the seek target
83
+You can encode a suitable video using [ffmpeg](https://ffmpeg.org/) like this:
80 84
 
81
-player.seekToFrame(1200, true); // Seek to frame 1200 exactly
85
+```sh
86
+ffmpeg -i in.mp4 -f mpegts -codec:v mpeg1video -codec:a mp2 -b 0 out.ts
87
+```
82 88
 
89
+You can also control the video size (`-s`), framerate (`-r`), video bitrate (`-b:v`), audio bitrate (`-b:a`), number of audio channels (`-ac`), sampling rate (`-ar`) and much more. Please refer to the ffmpeg documentation for the details.
83 90
 
84
-// Passing 'seekable: true' also populates the total frame count and duration
85
-// of the video
91
+Comprehensive example:
92
+```sh
93
+ffmpeg -i in.mp4 -f mpegts \
94
+	-codec:v mpeg1video -s 960x540 -b:v 1500k -r 30 -bf 0 \
95
+	-codec:a mp2 -ar 44100 -ac 1 -b:a 128k \
96
+	out.ts
97
+```
86 98
 
87
-console.log('Duration: '+player.duration+' seconds ('+player.frameCount+' frames)')
88 99
 
100
+## Performance Considerations
89 101
 
102
+While JSMpeg can handle 720p video at 30fps even on an iPhone 5S, keep in mind that MPEG1 is not as efficient as modern codecs. MPEG1 needs quite a bit of bandwidth for HD video. 720p begins to look okay-ish at 2 Mbits/s (that's 250kb/s). Also, the higher the bitrate, the more work JavaScript has to do to decode it.
90 103
 
91
-// An 'onload' callback can be specified in the 'options' argument
92
-var mpegLoaded = function( player ) {
93
-	console.log('Loaded', player);
94
-};
95
-var player = new jsmpeg('file.mpeg', {onload: mpegLoaded});
104
+This should not be a problem for static files, or if you're only streaming within your local WiFi. If you don't need to support mobile devices, 1080p at 10mbit/s works just fine (if your encoder can keep up). For everything else I would advise you to use 540p (960x540) at 2Mbit/s max.
96 105
 
97
-// If you don't use 'autoplay' and don't explicitly call .play(), you can get individual
98
-// video frames (a canvas element) like so:
99
-var frame = null;
100
-while( (frame = player.nextFrame()) ) {
101
-	someOtherCanvasContext.drawImage(frame, 0, 0);
102
-}
103
-```
104 106
 
107
+## Streaming via WebSockets
105 108
 
109
+JSMpeg can connect to a WebSocket server that sends out binary MPEG-TS data. When streaming, JSMpeg tries to keep latency as low as possible - it immediately decodes everything it has, ignoring video and audio timestamps altogether. To keep everything in sync (and latency low), audio data should be interleaved between video frames very frequently (`-muxdelay` in ffmpeg).
106 110
 
107
-### Live Streaming ###
111
+A separate, buffered streaming mode, where JSMpeg pre-loads a few seconds of data and presents everything with exact timing and audio/video sync is conceivable, but currently not implemented.
108 112
 
109
-jsmpeg supports streaming live video through WebSockets. You can use ffmpeg and a nodejs server to serve the MPEG video. See this [blog post](http://phoboslab.org/log/2013/09/html5-live-video-streaming-via-websockets) for the details of setting up a server. Also have a look at the `stream-server.js` and `stream-example.html`.
113
+The internal buffers for video and audio are fairly small (512kb and 128kb respectively) and JSMpeg will discard old (even unplayed) data to make room for newly arriving data without much fuss. This could introduce decoding artifacts when there's a network congestion, but ensures that latency is kept at a minimum. If necessary, you can increase the `videoBufferSize` and `audioBufferSize` through the options.
110 114
 
111
-To configure jsmpeg to connect to the stream server, simply pass a WebSocket connection instead of a filename to the constructor:
115
+JSMpeg comes with a tiny WebSocket "relay", written in Node.js. This server accepts an MPEG-TS source over HTTP and serves it via WebSocket to all connecting Browsers. The incoming HTTP stream can be generated using [ffmpeg](https://ffmpeg.org/), gstreamer or by other means.
112 116
 
113
-```javascript
114
-// Setup the WebSocket connection and start the player
115
-var client = new WebSocket( 'ws://example.com:8084/' );
116
-var player = new jsmpeg(client, {canvas:canvas});
117
+The split between the source and the WebSocket relay is necessary, because ffmpeg doesn't speak the WebSocket protocol. However, this split also allows you to install the WebSocket relay on a public server and share your stream on the Internet (typically NAT in your router prevents the public Internet from connecting _into_ your local network).
118
+
119
+In short, it works like this:
120
+
121
+1) run the websocket-relay.js
122
+2) run ffmpeg, send output to the relay's HTTP port
123
+3) connect JSMpeg in the browser to the relay's Websocket port
124
+
125
+
126
+## Example Setup for Streaming: Raspberry Pi Live Webcam
127
+
128
+For this example, ffmpeg and the WebSocket relay run on the same system. This allows you to view the stream in your local network, but not on the public internet.
129
+
130
+This example assumes that your webcam is compatible with Video4Linux2 and appears as `/dev/video0` in the filesystem. Most USB webcams support the UVC standard and should work just fine. The onboard Raspberry Camera can be made available as V4L2 device by loading a kernel module: `sudo modprobe bcm2835-v4l2`.
131
+
132
+
133
+1) Install ffmpeg (See [How to install ffmpeg on Debian / Raspbian](http://superuser.com/questions/286675/how-to-install-ffmpeg-on-debian)). Using ffmpeg, we can capture the webcam video & audio and encode it into MPEG1/MP2.
134
+
135
+2) Install Node.js and npm (See [Installing Node.js on Debian and Ubuntu based Linux distributions](https://nodejs.org/en/download/package-manager/#debian-and-ubuntu-based-linux-distributions) for newer versions). The Websocket relay is written in Node.js
136
+
137
+3) Install http-server. We will use this to serve the static files (view-stream.html, jsmpeg.min.js), so that we can view the website with the video in our browser. Any other webserver would work as well (nginx, apache, etc.):
138
+`sudo npm -g install http-server`
139
+
140
+4) Install git and clone this repository (or just download it as ZIP and unpack)
141
+```
142
+sudo apt-get install git
143
+git clone https://github.com/phoboslab/jsmpeg.git
117 144
 ```
118 145
 
119
-###Stream Recording###
146
+5) Change into the jsmpeg/ directory
147
+`cd jsmpeg/`
120 148
 
121
-To record an MPEG stream clientside in the browser jsmpeg provides the `.startRecording(cb)` and `.stopRecording()` methods. `.stopRecording()` returns a `Blob` object that can be used to create a download link.
149
+6) Install the Node.js Websocket Library:
150
+`npm install ws`
122 151
 
123
-```javascript
124
-player.startRecording(function(player){
125
-	// Called when recording really starts; usually 
126
-	// when the next intra frame is received
127
-});
152
+7) Start the Websocket relay. Provide a password and a port for the incoming HTTP video stream and a Websocket port that we can connect to in the browser:
153
+`node websocket-relay.js supersecret 8081 8082`
128 154
 
129
-// ...
155
+8) In a new terminal window (still in the `jsmpeg/` directory), start the `http-server` so we can serve the view-stream.html to the browser:
156
+`http-server`
130 157
 
131
-// Stop recording and create a download link
132
-var blob = player.stopRecording();
158
+9) Open the streaming website in your browser. The `http-server` will tell you the ip (usually `192.168.[...]`) and port (usually `8080`) where it's running on:
159
+`http://192.168.[...]:8080/view-stream.html`
133 160
 
134
-var filename = 'jsmpeg-recording.mpg';
135
-var a = document.getElementById('downloadLink');
136
-a.innerHTML = filename;
137
-a.download = fileName;
138
-a.href = window.URL.createObjectURL(blob);
161
+10) In a third terminal window, start ffmpeg to capture the webcam video and send it to the Websocket relay. Provide the password and port (from step 7) in the destination URL:
162
+```
163
+ffmpeg \
164
+	-f v4l2 \
165
+		-framerate 25 -video_size 640x480 -i /dev/video0 \
166
+	-f mpegts \
167
+		-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
168
+	http://localhost:8081/supersecret
139 169
 ```
140 170
 
171
+You should now see a live webcam image in your browser. 
141 172
 
173
+If ffmpeg failed to open the input video, it's likely that your webcam does not support the given resolution, format or framerate. To get a list of compatible modes run:
142 174
 
143
-## Limitations ##
175
+`ffmpeg -f v4l2 -list_formats all -i /dev/video0`
144 176
 
145
-- Playback can only start when the file is fully loaded (when not streaming through WebSockets). I'm waiting for chunked XHR with ArrayBuffers to arrive in browsers.
146
-- MPEG files with B-Frames look weird - frames are not reordered. This should be relatively easy
147
-to fix, but most encoders seem to not use B-Frames at all by default.
148
-- The width of the MPEG video has to be a multiple of 2.
149
-- Only raw MPEG video streams are supported. The decoder hates Stream Packet Headers in between
150
-macroblocks.
151 177
 
152
-You can use [FFmpeg](http://www.ffmpeg.org/) to encode videos in a suited format. This will crop
153
-the size to a multiple of 2, omit B-Frames and force a raw video stream:
178
+To add the webcam audio, just call ffmpeg with two separate inputs.
154 179
 
155 180
 ```
156
-ffmpeg -i in.mp4 -f mpeg1video -vf "crop=iw-mod(iw\,2):ih-mod(ih\,2)" -b 0 out.mpg
181
+ffmpeg \
182
+	-f v4l2 \
183
+		-framerate 25 -video_size 640x480 -i /dev/video0 \
184
+	-f alsa \
185
+		-ar 44100 -ac 2 -i hw:0 \
186
+	-f mpegts \
187
+		-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
188
+		-codec:a mp2 -b:a 128k \
189
+		-muxdelay 0.001 \
190
+	http://localhost:8081/supersecret
191
+```
192
+
193
+Note the `muxdelay` argument. This should reduce lag, but doesn't always work when streaming video and audio - see remarks below:
194
+
195
+
196
+## Some remarks about ffmpeg muxing and latency
197
+
198
+Adding an audio stream to the MPEG-TS can sometimes introduce considerable latency. I especially found this to be a problem on linux using ALSA and V4L2 (using AVFoundation on macOS worked just fine). However, there is a simple workaround: just run two instances of ffmpeg in parallel. One for audio, one for video. Send both outputs to the same Websocket relay. Thanks to the simplicity of the MPEG-TS format, proper "muxing" of the two streams happens automatically in the relay.
199
+
157 200
 ```
201
+ffmpeg \
202
+	-f v4l2 \
203
+		-framerate 25 -video_size 640x480 -i /dev/video0 \
204
+	-f mpegts \
205
+		-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
206
+		-muxdelay 0.001 \
207
+	http://localhost:8081/supersecret
208
+
209
+# In a second terminal
210
+ffmpeg \
211
+	-f alsa \
212
+		-ar 44100 -ac 2 -i hw:0 \
213
+	-f mpegts \
214
+		-codec:a mp2 -b:a 128k \
215
+		-muxdelay 0.001 \
216
+	http://localhost:8081/supersecret
217
+```
218
+In my tests, USB Webcams introduce about ~180ms of latency and there seems to be nothing we can do about it. The Raspberry Pi however has a [camera module](https://www.raspberrypi.org/products/camera-module-v2/) that provides lower latency video capture.
219
+
220
+To capture webcam input on Windows or macOS using ffmpeg, see the [ffmpeg Capture/Webcam Wiki](https://trac.ffmpeg.org/wiki/Capture/Webcam).
221
+
222
+
223
+## JSMpeg Architecture and Internals
224
+
225
+This library was built in a fairly modular fashion while keeping overhead at a minimum. Implementing new Demuxers, Decoders, Outputs (Renderers, Audio Devices) or Sources should be possible without changing any other parts. However, you would still need to subclass the `JSMpeg.Player` in order to use any new modules.
226
+
227
+Have a look at the [jsmpeg.js source](https://github.com/phoboslab/jsmpeg/blob/master/src/jsmpeg.js) for an overview of how the modules interconnect and what APIs they should provide. I also wrote a blog post about some of JSMpeg's internals: [Decode It Like It's 1999](http://phoboslab.org/log/2017/01/decode-it-like-it-s-1999).
228
+
229
+Using parts of the library without creating a full player should also be fairly straightforward. E.g. you can create a stand-alone instance of the `JSMpeg.Decoder.MPEG1Video` class, `.connect()` a renderer, `.write()` some data to it and `.decode()` a frame, without touching JSMpeg's other parts.
230
+
231
+
232
+## Previous Version
233
+
234
+The JSMpeg version currently living in this repo is a complete rewrite of the original jsmpeg library that was just able to decode raw mpeg1video. If you're looking for the old version, see the [v0.2 tag](https://github.com/phoboslab/jsmpeg/releases/tag/v0.2).
158 235
 
159
-## Inspiration ##
160 236
 
161
-jsmpeg is based on [Java MPEG-1 Video Decoder and Player](http://sourceforge.net/projects/javampeg1video/) by Korandi Zoltan and inspired by [MPEG Decoder in Java ME](http://www.developer.nokia.com/Community/Wiki/MPEG_decoder_in_Java_ME) by Nokia.

+ 17 - 0
build.sh Ver fichero

@@ -0,0 +1,17 @@
1
#!/bin/sh
# Concatenate and minify all JSMpeg sources into jsmpeg.min.js.
# The order matters: src/jsmpeg.js declares the JSMpeg namespace and must
# come first; the remaining modules attach themselves to it.
uglifyjs \
	src/jsmpeg.js \
	src/video-element.js \
	src/player.js \
	src/buffer.js \
	src/ajax.js \
	src/ajax-progressive.js \
	src/websocket.js \
	src/ts.js \
	src/decoder.js \
	src/mpeg1.js \
	src/mp2.js \
	src/webgl.js \
	src/canvas2d.js \
	src/webaudio.js \
	-o jsmpeg.min.js

La diferencia del archivo ha sido suprimido porque es demasiado grande
+ 3 - 0
jsmpeg.min.js


La diferencia del archivo ha sido suprimido porque es demasiado grande
+ 0 - 2668
jsmpg.js


+ 122 - 0
src/ajax-progressive.js Ver fichero

@@ -0,0 +1,122 @@
1
JSMpeg.Source.AjaxProgressive = (function(){ "use strict";

// Progressive Ajax source: loads a static file over HTTP in chunks using
// Range requests, so playback can begin before the whole file has arrived.
// With options.throttled (default true), chunks are only fetched when the
// player signals via resume() that it will need them soon.
var AjaxProgressiveSource = function(url, options) {
	this.url = url;
	this.destination = null;
	this.request = null;

	this.completed = false;
	this.established = false;
	this.progress = 0;

	this.fileSize = 0;
	this.loadedSize = 0;
	this.chunkSize = options.chunkSize || 1024*1024;

	this.isLoading = false;
	this.loadStartTime = 0;
	this.loadTime = 0;  // seconds the last chunk took to load
	this.loadFails = 0; // consecutive failed chunk requests, for retry limiting.
	                    // Must be initialized here: `this.loadFails++` on an
	                    // undefined property yields NaN and disables retries.
	this.throttled = options.throttled !== false;
};

// Connect a destination (demuxer/decoder); each loaded chunk is written to it.
AjaxProgressiveSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Issue a HEAD request to learn the total file size, then start loading
// the first chunk.
AjaxProgressiveSource.prototype.start = function() {
	this.request = new XMLHttpRequest();

	this.request.onreadystatechange = function() {
		if (this.request.readyState === this.request.DONE) {
			this.fileSize = parseInt(
				this.request.getResponseHeader("Content-Length"), 10
			);
			this.loadNextChunk();
		}
	}.bind(this);

	this.request.onprogress = this.onProgress.bind(this);
	this.request.open('HEAD', this.url);
	this.request.send();
};

// Called by the player with the seconds of buffered playback headroom.
// When throttled, decide whether it's time to fetch the next chunk.
AjaxProgressiveSource.prototype.resume = function(secondsHeadroom) {
	if (this.isLoading || !this.throttled) {
		return;
	}

	// Guess the worst case loading time with lots of safety margin. This is
	// somewhat arbitrary...
	var worstCaseLoadingTime = this.loadTime * 8 + 2;
	if (worstCaseLoadingTime > secondsHeadroom) {
		this.loadNextChunk();
	}
};

AjaxProgressiveSource.prototype.abort = function() {
	this.request.abort();
};

// Fetch the next chunk via an HTTP Range request. A failed chunk is
// retried up to 3 times before giving up.
AjaxProgressiveSource.prototype.loadNextChunk = function() {
	var start = this.loadedSize,
		end = Math.min(this.loadedSize + this.chunkSize-1, this.fileSize-1);

	if (start >= this.fileSize) {
		this.completed = true;
		return;
	}

	this.isLoading = true;
	this.loadStartTime = JSMpeg.Now();
	this.request = new XMLHttpRequest();

	this.request.onreadystatechange = function() {
		if (
			this.request.readyState === this.request.DONE &&
			this.request.status >= 200 && this.request.status < 300
		) {
			this.onChunkLoad(this.request.response);
		}
		else if (this.request.readyState === this.request.DONE) {
			// Retry a few times before giving up on this chunk
			if (this.loadFails++ < 3) {
				this.loadNextChunk();
			}
		}
	}.bind(this);

	if (start === 0) {
		this.request.onprogress = this.onProgress.bind(this);
	}

	// The query string defeats misbehaving caches that ignore the Range
	// header; the Range header performs the actual partial request.
	this.request.open('GET', this.url+'?'+start+"-"+end);
	this.request.setRequestHeader("Range", "bytes="+start+"-"+end);
	this.request.responseType = "arraybuffer";
	this.request.send();
};

AjaxProgressiveSource.prototype.onProgress = function(ev) {
	this.progress = (ev.loaded / ev.total);
};

// A chunk arrived: update sizes/timings, hand the data to the destination
// and - unless throttled - immediately request the next chunk.
AjaxProgressiveSource.prototype.onChunkLoad = function(data) {
	this.established = true;
	this.progress = 1;
	this.loadedSize += data.byteLength;
	this.loadFails = 0;
	this.isLoading = false;

	if (this.destination) {
		this.destination.write(data);
	}

	this.loadTime = JSMpeg.Now() - this.loadStartTime;
	if (!this.throttled) {
		this.loadNextChunk();
	}
};

return AjaxProgressiveSource;

})();

+ 61 - 0
src/ajax.js Ver fichero

@@ -0,0 +1,61 @@
1
JSMpeg.Source.Ajax = (function(){ "use strict";

// Simple Ajax source: downloads the whole file in a single GET request
// and hands the complete ArrayBuffer to the connected destination.
var AjaxSource = function(url, options) {
	this.url = url;
	this.destination = null;
	this.request = null;

	this.completed = false;
	this.established = false;
	this.progress = 0;
};

// Connect a destination (demuxer/decoder) to receive the loaded data.
AjaxSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Kick off the download. onLoad() fires once the response has arrived
// successfully.
AjaxSource.prototype.start = function() {
	this.request = new XMLHttpRequest();

	this.request.onreadystatechange = function() {
		if (this.request.readyState !== this.request.DONE) {
			return;
		}
		if (this.request.status === 200) {
			this.onLoad(this.request.response);
		}
	}.bind(this);

	this.request.onprogress = this.onProgress.bind(this);
	this.request.open('GET', this.url);
	this.request.responseType = "arraybuffer";
	this.request.send();
};

// Static sources load in one go; there's nothing to throttle or resume.
AjaxSource.prototype.resume = function(secondsHeadroom) {};

AjaxSource.prototype.abort = function() {
	this.request.abort();
};

AjaxSource.prototype.onProgress = function(event) {
	this.progress = event.loaded / event.total;
};

// The complete file has arrived; mark the source done and forward the data.
AjaxSource.prototype.onLoad = function(data) {
	this.progress = 1;
	this.established = true;
	this.completed = true;

	if (this.destination) {
		this.destination.write(data);
	}
};

return AjaxSource;

})();

+ 196 - 0
src/buffer.js Ver fichero

@@ -0,0 +1,196 @@
1
JSMpeg.BitBuffer = (function(){ "use strict";

// A byte buffer with a bit-granular read position, used as the input
// queue for the demuxer and decoders. Two modes:
//   EXPAND - the backing store grows when a write doesn't fit (static files)
//   EVICT  - already-read bytes are discarded to make room (streaming)
var BitBuffer = function(bufferOrLength, mode) {
	if (typeof(bufferOrLength) === 'object') {
		this.bytes = (bufferOrLength instanceof Uint8Array)
			? bufferOrLength 
			: new Uint8Array(bufferOrLength);

		this.byteLength = this.bytes.length;
	}
	else {
		this.bytes = new Uint8Array(bufferOrLength || 1024*1024);
		this.byteLength = 0;
	}

	this.mode = mode || BitBuffer.MODE.EXPAND;
	this.index = 0; // read position in BITS, not bytes
};

// Reallocate the backing store to `size` bytes, keeping as much of the
// current content and the read position as still fits.
BitBuffer.prototype.resize = function(size) {
	var newBytes = new Uint8Array(size);
	if (this.byteLength !== 0) {
		this.byteLength = Math.min(this.byteLength, size);
		// Copy only the valid region. Uint8Array.set() takes no length
		// argument and throws a RangeError if the whole source array is
		// larger than the target, so a subarray is required when shrinking.
		newBytes.set(this.bytes.subarray(0, this.byteLength));
	}
	this.bytes = newBytes;
	this.index = Math.min(this.index, this.byteLength << 3);
};

// Discard already-read bytes to make room for `sizeNeeded` new bytes.
BitBuffer.prototype.evict = function(sizeNeeded) {
	var bytePos = this.index >> 3,
		available = this.bytes.length - this.byteLength;

	// If the current index is the write position, we can simply reset both
	// to 0. Also reset (and throw away yet unread data) if we won't be able
	// to fit the new data in even after a normal eviction.
	if (
		this.index === this.byteLength << 3 ||
		sizeNeeded > available + bytePos // emergency evac
	) {
		this.byteLength = 0;
		this.index = 0;
		return;
	}
	else if (bytePos === 0) {
		// Nothing read yet - we can't evict anything
		return;
	}

	// Some browsers don't support copyWithin() yet - we may have to do 
	// it manually using set and a subarray
	if (this.bytes.copyWithin) {
		this.bytes.copyWithin(0, bytePos, this.byteLength);
	}
	else {
		this.bytes.set(this.bytes.subarray(bytePos, this.byteLength));
	}

	this.byteLength = this.byteLength - bytePos;
	this.index -= bytePos << 3;
	return;
};

// Append a single buffer, or an array of buffers, to the end of the
// buffer. Grows (EXPAND) or evicts read data (EVICT) when needed.
BitBuffer.prototype.write = function(buffers) {
	var isArrayOfBuffers = (typeof(buffers[0]) === 'object'),
		totalLength = 0,
		available = this.bytes.length - this.byteLength;

	// Calculate total byte length
	if (isArrayOfBuffers) {
		for (var i = 0; i < buffers.length; i++) {
			totalLength += buffers[i].byteLength;
		}
	}
	else {
		totalLength = buffers.byteLength;
	}

	// Do we need to resize or evict?
	if (totalLength > available) {
		if (this.mode === BitBuffer.MODE.EXPAND) {
			// The new store must hold the existing content plus all of the
			// new data; doubling alone can fall short for very large writes
			// (the previous `totalLength - available` target was always
			// smaller than the required byteLength + totalLength).
			var newSize = Math.max(
				this.bytes.length * 2,
				this.byteLength + totalLength
			);
			this.resize(newSize);
		}
		else {
			this.evict(totalLength);
		}
	}

	if (isArrayOfBuffers) {
		for (var i = 0; i < buffers.length; i++) {
			this.appendSingleBuffer(buffers[i]);
		}
	}
	else {
		this.appendSingleBuffer(buffers);
	}
};

BitBuffer.prototype.appendSingleBuffer = function(buffer) {
	buffer = buffer instanceof Uint8Array
		? buffer 
		: new Uint8Array(buffer);

	this.bytes.set(buffer, this.byteLength);
	this.byteLength += buffer.length;
};

// Scan forward (from the next byte boundary) for an MPEG start code
// (00 00 01 xx). Returns the code byte xx, or -1 if none is found.
// Advances the read position past the start code, or to the end.
BitBuffer.prototype.findNextStartCode = function() {
	for (var i = (this.index+7 >> 3); i < this.byteLength; i++) {
		if(
			this.bytes[i] == 0x00 &&
			this.bytes[i+1] == 0x00 &&
			this.bytes[i+2] == 0x01
		) {
			this.index = (i+4) << 3;
			return this.bytes[i+3];
		}
	}
	this.index = (this.byteLength << 3);
	return -1;
};

// Scan forward for one specific start code; returns it, or -1 when the
// buffer is exhausted first.
BitBuffer.prototype.findStartCode = function(code) {
	var current = 0;
	while (true) {
		current = this.findNextStartCode();
		if (current === code || current === -1) {
			return current;
		}
	}
};

// True if the next byte-aligned bytes are a start code prefix (00 00 01),
// or if the buffer is exhausted.
BitBuffer.prototype.nextBytesAreStartCode = function() {
	var i = (this.index+7 >> 3);
	return (
		i >= this.byteLength || (
			this.bytes[i] == 0x00 && 
			this.bytes[i+1] == 0x00 &&
			this.bytes[i+2] == 0x01
		)
	);
};

// Return the next `count` bits without advancing the read position.
BitBuffer.prototype.peek = function(count) {
	var offset = this.index;
	var value = 0;
	while (count) {
		var currentByte = this.bytes[offset >> 3],
			remaining = 8 - (offset & 7), // remaining bits in byte
			read = remaining < count ? remaining : count, // bits in this run
			shift = remaining - read,
			mask = (0xff >> (8-read));

		value = (value << read) | ((currentByte & (mask << shift)) >> shift);

		offset += read;
		count -= read;
	}

	return value;
};

// Return the next `count` bits and advance the read position.
BitBuffer.prototype.read = function(count) {
	var value = this.peek(count);
	this.index += count;
	return value;
};

BitBuffer.prototype.skip = function(count) {
	return (this.index += count);
};

BitBuffer.prototype.rewind = function(count) {
	this.index = Math.max(this.index - count, 0);
};

// True if at least `count` more bits can be read.
BitBuffer.prototype.has = function(count) {
	return ((this.byteLength << 3) - this.index) >= count;
};

BitBuffer.MODE = {
	EVICT: 1,
	EXPAND: 2
};

return BitBuffer;

})();

+ 115 - 0
src/canvas2d.js Ver fichero

@@ -0,0 +1,115 @@
1
+JSMpeg.Renderer.Canvas2D = (function(){ "use strict";
2
+
3
// Software renderer: converts decoded YCbCr planes to RGBA on the CPU and
// draws them into a 2D canvas with putImageData().
var CanvasRenderer = function(options) {
	// Render into the supplied canvas, or create a detached one.
	this.canvas = options.canvas || document.createElement('canvas');
	this.width = this.canvas.width;
	this.height = this.canvas.height;
	this.enabled = true;

	this.context = this.canvas.getContext('2d');
};
11
+
12
// Resize the backing canvas and allocate a matching RGBA pixel buffer.
CanvasRenderer.prototype.resize = function(width, height) {
	var w = width|0;
	var h = height|0;

	this.width = w;
	this.height = h;
	this.canvas.width = w;
	this.canvas.height = h;

	// Fresh pixel buffer for the new size, filled with 255 so the alpha
	// channel is opaque; render() only ever writes the RGB components.
	this.imageData = this.context.getImageData(0, 0, w, h);
	this.imageData.data.fill(255);
};
22
+
23
// Draw a simple load-progress indicator: a white bar growing up from the
// bottom of the canvas over a dark background. `progress` is 0..1.
CanvasRenderer.prototype.renderProgress = function(progress) {
	var ctx = this.context;
	var width = this.canvas.width;
	var height = this.canvas.height;
	var barHeight = height * progress;

	ctx.fillStyle = '#222';
	ctx.fillRect(0, 0, width, height);
	ctx.fillStyle = '#fff';
	ctx.fillRect(0, height - barHeight, width, barHeight);
};
34
+
35
// Convert the decoded YCbCr planes into the RGBA buffer and blit it.
CanvasRenderer.prototype.render = function(y, cb, cr) {
	var pixels = this.imageData.data;
	this.YCbCrToRGBA(y, cb, cr, pixels);
	this.context.putImageData(this.imageData, 0, 0);
};
39
+
40
// Convert 4:2:0 YCbCr planes into the interleaved RGBA byte buffer `rgba`.
// Fixed-point integer math throughout (coefficients are the usual *256
// approximations); alpha bytes (offsets +3/+7) are never written and stay
// at the 255 set by resize().
// NOTE(review): with these coefficients the `cb` argument is multiplied by
// the factors normally applied to Cr (1.402 ≈ 1 + 103/256) — the two chroma
// parameters appear to be swapped in name; verify against callers.
CanvasRenderer.prototype.YCbCrToRGBA = function(y, cb, cr, rgba) {
	if (!this.enabled) {
		return;
	}

	// Chroma values are the same for each block of 4 pixels, so we proccess
	// 2 lines at a time, 2 neighboring pixels each.
	// I wish we could use 32bit writes to the RGBA buffer instead of writing
	// each byte separately, but we need the automatic clamping of the RGBA
	// buffer.

	// The luma plane stride is padded to a multiple of 16 (macroblock size);
	// w2 is the chroma stride at half resolution.
	var w = ((this.width + 15) >> 4) << 4,
		w2 = w >> 1;

	var yIndex1 = 0,
		yIndex2 = w,
		yNext2Lines = w + (w - this.width);

	var cIndex = 0,
		cNextLine = w2 - (this.width >> 1);

	var rgbaIndex1 = 0,
		rgbaIndex2 = this.width * 4,
		rgbaNext2Lines = this.width * 4;

	var cols = this.width >> 1,
		rows = this.height >> 1;

	var ccb, ccr, r, g, b;

	for (var row = 0; row < rows; row++) {
		for (var col = 0; col < cols; col++) {
			ccb = cb[cIndex];
			ccr = cr[cIndex];
			cIndex++;

			// Chroma contributions, shared by all 4 pixels of this 2x2 block.
			r = (ccb + ((ccb * 103) >> 8)) - 179;
			g = ((ccr * 88) >> 8) - 44 + ((ccb * 183) >> 8) - 91;
			b = (ccr + ((ccr * 198) >> 8)) - 227;

			// Line 1
			var y1 = y[yIndex1++];
			var y2 = y[yIndex1++];
			rgba[rgbaIndex1]   = y1 + r;
			rgba[rgbaIndex1+1] = y1 - g;
			rgba[rgbaIndex1+2] = y1 + b;
			rgba[rgbaIndex1+4] = y2 + r;
			rgba[rgbaIndex1+5] = y2 - g;
			rgba[rgbaIndex1+6] = y2 + b;
			rgbaIndex1 += 8;

			// Line 2
			var y3 = y[yIndex2++];
			var y4 = y[yIndex2++];
			rgba[rgbaIndex2]   = y3 + r;
			rgba[rgbaIndex2+1] = y3 - g;
			rgba[rgbaIndex2+2] = y3 + b;
			rgba[rgbaIndex2+4] = y4 + r;
			rgba[rgbaIndex2+5] = y4 - g;
			rgba[rgbaIndex2+6] = y4 + b;
			rgbaIndex2 += 8;
		}

		yIndex1 += yNext2Lines;
		yIndex2 += yNext2Lines;
		rgbaIndex1 += rgbaNext2Lines;
		rgbaIndex2 += rgbaNext2Lines;
		cIndex += cNextLine;
	}
};
110
+
111
+return CanvasRenderer;
112
+
113
+})();
114
+
115
+

+ 96 - 0
src/decoder.js Ver fichero

@@ -0,0 +1,96 @@
1
+JSMpeg.Decoder.Base = (function(){ "use strict";
2
+
3
// Common base for the MPEG1 video and MP2 audio decoders: owns the
// destination link, PTS bookkeeping and the currentTime property.
var BaseDecoder = function(options) {
	this.destination = null;
	this.canPlay = false;

	// For static files we record (bit index, pts) pairs so seek() and
	// advanceDecodedTime() can sync against exact timestamps; for live
	// streams there is nothing to seek in.
	this.collectTimestamps = !options.streaming;
	this.timestamps = [];
	this.timestampIndex = 0;

	this.startTime = 0;
	this.decodedTime = 0;

	// currentTime is a read-only accessor backed by getCurrentTime(),
	// which subclasses may override.
	Object.defineProperty(this, 'currentTime', {get: this.getCurrentTime});
};
16
+
17
// Attach the downstream node (renderer or audio output) that receives the
// decoded data.
BaseDecoder.prototype.connect = function(destination) {
	this.destination = destination;
};
20
+
21
// Append raw stream data to the bit buffer, recording at which bit
// position the given PTS applies (static files only).
BaseDecoder.prototype.write = function(pts, buffers) {
	if (this.collectTimestamps) {
		var isFirstChunk = (this.timestamps.length === 0);
		if (isFirstChunk) {
			// The first chunk establishes the stream's start time.
			this.startTime = pts;
			this.decodedTime = pts;
		}
		var bitPosition = this.bits.byteLength << 3;
		this.timestamps.push({index: bitPosition, time: pts});
	}

	this.bits.write(buffers);
	this.canPlay = true;
};
33
+
34
// Jump to the last recorded timestamp at or before `time` and reposition
// the bit buffer accordingly. No-op for live streams.
BaseDecoder.prototype.seek = function(time) {
	if (!this.collectTimestamps) {
		return;
	}

	var target = 0;
	for (var i = 0; i < this.timestamps.length; i++) {
		if (this.timestamps[i].time > time) {
			break;
		}
		target = i;
	}
	this.timestampIndex = target;

	var entry = this.timestamps[this.timestampIndex];
	if (entry) {
		this.bits.index = entry.index;
		this.decodedTime = entry.time;
	}
	else {
		// No timestamps recorded yet: rewind to the very beginning.
		this.bits.index = 0;
		this.decodedTime = this.startTime;
	}
};
57
+
58
// Base decode step; subclasses (e.g. MP2) override this. The default only
// re-syncs decodedTime against any timestamp the read position has passed.
BaseDecoder.prototype.decode = function() {
	this.advanceDecodedTime(0);
};
61
+
62
// Advance decodedTime after decoding one frame's worth of data. When PTS
// records are collected, prefer snapping to the latest timestamp whose bit
// index the reader has passed; otherwise fall back to adding `seconds`.
BaseDecoder.prototype.advanceDecodedTime = function(seconds) {
	if (this.collectTimestamps) {
		var newTimestampIndex = -1;
		// Scan forward from the last known timestamp only — indices are
		// monotonically increasing.
		for (var i = this.timestampIndex; i < this.timestamps.length; i++) {
			if (this.timestamps[i].index > this.bits.index) {
				break;
			}
			newTimestampIndex = i;
		}

		// Did we find a new PTS, different from the last? If so, we don't have
		// to advance the decoded time manually and can instead sync it exactly
		// to the PTS.
		if (
			newTimestampIndex !== -1 && 
			newTimestampIndex !== this.timestampIndex
		) {
			this.timestampIndex = newTimestampIndex;
			this.decodedTime = this.timestamps[this.timestampIndex].time;
			return;
		}
	}

	this.decodedTime += seconds;
};
87
+
88
// Backing getter for the `currentTime` property defined in the constructor.
BaseDecoder.prototype.getCurrentTime = function() {
	return this.decodedTime;
};
91
+
92
+return BaseDecoder;
93
+
94
+})();
95
+
96
+

+ 94 - 0
src/jsmpeg.js Ver fichero

@@ -0,0 +1,94 @@
1
+/*! jsmpeg v1.0 | (c) Dominic Szablewski | MIT license */
2
+
3
+
4
+// This sets up the JSMpeg "Namespace". The object is empty apart from the Now()
5
+// utility function and the automatic CreateVideoElements() after DOMReady.
6
var JSMpeg = {

	// The Player sets up the connections between source, demuxer, decoders,
	// renderer and audio output. It ties everything together, is responsible
	// for scheduling decoding and provides some convenience methods for
	// external users.
	Player: null,

	// A Video Element wraps the Player, shows HTML controls to start/pause
	// the video and handles Audio unlocking on iOS. VideoElements can be
	// created directly in HTML using the <div class="jsmpeg"/> tag.
	VideoElement: null,
	
	// The BitBuffer wraps a Uint8Array and allows reading an arbitrary number
	// of bits at a time. On writing, the BitBuffer either expands its
	// internal buffer (for static files) or deletes old data (for streaming).
	BitBuffer: null,

	// A Source provides raw data from HTTP, a WebSocket connection or any
	// other means. Sources must support the following API:
	//   .connect(destinationNode)
	//   .write(buffer)
	//   .start() - start reading
	//   .resume(headroom) - continue reading; headroom to play pos in seconds
	//   .established - boolean, true after connection is established
	//   .completed - boolean, true if the source is completely loaded
	//   .progress - float 0-1
	Source: {}, 

	// A Demuxer may sit between a Source and a Decoder. It separates the
	// incoming raw data into Video, Audio and other Streams. API:
	//   .connect(streamId, destinationNode)
	//   .write(buffer)
	//   .currentTime – float, in seconds
	//   .startTime - float, in seconds
	Demuxer: {},

	// A Decoder accepts an incoming Stream of raw Audio or Video data, buffers
	// it and upon `.decode()` decodes a single frame of data. Video decoders
	// call `destinationNode.render(Y, Cr, CB)` with the decoded pixel data;
	// Audio decoders call `destinationNode.play(left, right)` with the decoded
	// PCM data. API:
	//   .connect(destinationNode)
	//   .write(pts, buffer)
	//   .decode()
	//   .seek(time)
	//   .currentTime - float, in seconds
	//   .startTime - float, in seconds
	Decoder: {},

	// A Renderer accepts raw YCrCb data in 3 separate buffers via the render()
	// method. Renderers typically convert the data into the RGBA color space
	// and draw it on a Canvas, but other output - such as writing PNGs - would
	// be conceivable. API:
	//   .render(y, cr, cb) - pixel data as Uint8Arrays
	//   .enabled - whether the renderer does anything upon receiving data
	Renderer: {},

	// Audio Outputs accept raw Stereo PCM data in 2 separate buffers via the
	// play() method. Outputs typically play the audio on the user's device.
	// API:
	//   .play(sampleRate, left, right) - rate in hertz; PCM data as Uint8Arrays
	//   .stop()
	//   .enqueuedTime - float, in seconds
	//   .enabled - whether the output does anything upon receiving data
	AudioOutput: {}, 

	// Current time in seconds, using the high-resolution clock if available.
	Now: function() {
		return window.performance 
			? window.performance.now() / 1000
			: Date.now() / 1000;
	},

	// Instantiate a VideoElement for every element with the "jsmpeg" class.
	CreateVideoElements: function() {
		var elements = document.querySelectorAll('.jsmpeg');
		for (var i = 0; i < elements.length; i++) {
			new JSMpeg.VideoElement(elements[i]);
		}
	}
};
86
+
87
// Automatically create players for all found <div class="jsmpeg"/> elements.
// Fix: the original only ran immediately when readyState was 'complete'. If
// the script executes while readyState is 'interactive' — after
// DOMContentLoaded has already fired (e.g. a dynamically injected script) —
// the listener would never fire and no players were created. Once readyState
// leaves 'loading' the DOM is parsed, so it is safe to run right away.
if (document.readyState !== 'loading') {
	JSMpeg.CreateVideoElements();
}
else {
	document.addEventListener('DOMContentLoaded', JSMpeg.CreateVideoElements);
}
94
+

+ 682 - 0
src/mp2.js Ver fichero

@@ -0,0 +1,682 @@
1
+JSMpeg.Decoder.MP2Audio = (function(){ "use strict";
2
+
3
+// Based on kjmp2 by Martin J. Fiedler
4
+// http://keyj.emphy.de/kjmp2/
5
+
6
// MP2 (MPEG-1 Audio Layer II) decoder, ported from kjmp2. Extends
// JSMpeg.Decoder.Base; produces 1152 PCM samples per channel per frame.
var MP2 = function(options) {
	JSMpeg.Decoder.Base.call(this, options);

	var bufferSize = options.audioBufferSize || 128*1024;
	var bufferMode = options.streaming
		? JSMpeg.BitBuffer.MODE.EVICT
		: JSMpeg.BitBuffer.MODE.EXPAND;

	this.bits = new JSMpeg.BitBuffer(bufferSize, bufferMode);

	// Output PCM buffers: one MP2 frame = 1152 samples per channel.
	this.left = new Float32Array(1152);
	this.right = new Float32Array(1152);
	this.sampleRate = 44100;
	
	// D: synthesis window duplicated twice so windowing can run without
	// wrap-around checks. V: synthesis ring buffer, U: 32-sample workspace.
	this.D = new Float32Array(1024);
	this.D.set(MP2.SYNTHESIS_WINDOW, 0);
	this.D.set(MP2.SYNTHESIS_WINDOW, 512);
	this.V = new Float32Array(1024);
	this.U = new Int32Array(32);
	this.VPos = 0;

	// Per-channel (2) x per-subband (32) decoding state.
	this.allocation = [new Array(32), new Array(32)];
	this.scaleFactorInfo = [new Uint8Array(32), new Uint8Array(32)];
	this.scaleFactor = [new Array(32), new Array(32)];
	this.sample = [new Array(32), new Array(32)];
	
	// Each subband carries 3 scale factors / 3 samples per granule part.
	for (var j = 0; j < 2; j++) {
		for (var i = 0; i < 32; i++) {
			this.scaleFactor[j][i] = [0, 0, 0];
			this.sample[j][i] = [0, 0, 0];
		}
	}
};

MP2.prototype = Object.create(JSMpeg.Decoder.Base.prototype);
MP2.prototype.constructor = MP2;
42
+
43
// Decode one audio frame from the bit buffer, hand the PCM to the output
// (if connected) and advance the clock. Returns false when there is no data
// left or the data is not a valid frame.
MP2.prototype.decode = function() {
	// Byte position of the prospective frame start.
	var frameStart = this.bits.index >> 3;
	if (frameStart >= this.bits.byteLength) {
		return false;
	}

	var frameSize = this.decodeFrame(this.left, this.right);
	// Re-align the bit position to the byte right after this frame.
	this.bits.index = (frameStart + frameSize) << 3;

	if (!frameSize) {
		return false;
	}

	if (this.destination) {
		this.destination.play(this.sampleRate, this.left, this.right);
	}

	this.advanceDecodedTime(this.left.length / this.sampleRate);
	return true;
};
63
+
64
// Playback position: decoded time minus whatever the audio output has
// buffered but not yet played.
MP2.prototype.getCurrentTime = function() {
	var pending = this.destination ? this.destination.enqueuedTime : 0;
	return this.decodedTime - pending;
};
68
+
69
// Decode a single MP2 frame (1152 samples per channel) from the bit buffer
// into `left`/`right`. Returns the frame size in bytes on success, or 0 if
// the data at the current position is not a valid MPEG-1 Layer II header.
// Fix: the `bitrateIndex > 13` check was ported from C where the index is
// unsigned; in JS a 'free format' header (bits.read(4) === 0) yields
// bitrateIndex === -1, which slipped past the check and indexed
// BIT_RATE[-1] (undefined -> NaN frame size). Now also rejects < 0.
MP2.prototype.decodeFrame = function(left, right) {
	// Check for valid header: syncword OK, MPEG-Audio Layer 2
	var sync = this.bits.read(11),
		version = this.bits.read(2),
		layer = this.bits.read(2),
		hasCRC = !this.bits.read(1);

	if (
		sync !== MP2.FRAME_SYNC ||
		version !== MP2.VERSION.MPEG_1 ||
		layer !== MP2.LAYER.II
	) {
		return 0; // Invalid header or unsupported version
	}

	var bitrateIndex = this.bits.read(4) - 1;
	if (bitrateIndex < 0 || bitrateIndex > 13) {
		return 0;  // Invalid bit rate or 'free format'
	}

	var sampleRateIndex = this.bits.read(2);
	var sampleRate = MP2.SAMPLE_RATE[sampleRateIndex];
	if (sampleRateIndex === 3) {
		return 0; // Invalid sample rate
	}
	// NOTE(review): dead branch — the guard above already rejects anything
	// but MPEG_1. Kept for parity with the kjmp2 source.
	if (version === MP2.VERSION.MPEG_2) {
		sampleRateIndex += 4;
		bitrateIndex += 14;
	}
	// `privat` (the private bit) is read only to advance the bit position.
	var padding = this.bits.read(1),
		privat = this.bits.read(1),
		mode = this.bits.read(2);

	// Parse the mode_extension, set up the stereo bound
	var bound = 0;
	if (mode === MP2.MODE.JOINT_STEREO) {
		bound = (this.bits.read(2) + 1) << 2;
	}
	else {
		this.bits.skip(2);
		bound = (mode === MP2.MODE.MONO) ? 0 : 32;
	}

	// Discard the last 4 bits of the header and the CRC value, if present
	this.bits.skip(4);
	if (hasCRC) {
		this.bits.skip(16);
	}

	// Compute the frame size. (`var sampleRate` is a re-declaration; with
	// hoisting it is the same variable as above.)
	var bitrate = MP2.BIT_RATE[bitrateIndex],
		sampleRate = MP2.SAMPLE_RATE[sampleRateIndex],
		frameSize = ((144000 * bitrate / sampleRate) + padding)|0;
	

	// Prepare the quantizer table lookups
	var tab3 = 0;
	var sblimit = 0;
	if (version === MP2.VERSION.MPEG_2) {
		// MPEG-2 (LSR) — unreachable here, see the guard above.
		tab3 = 2;
		sblimit = 30;
	}
	else {
		// MPEG-1
		var tab1 = (mode === MP2.MODE.MONO) ? 0 : 1;
		var tab2 = MP2.QUANT_LUT_STEP_1[tab1][bitrateIndex];
		tab3 = MP2.QUANT_LUT_STEP_2[tab2][sampleRateIndex];
		sblimit = tab3 & 63;
		tab3 >>= 6;
	}

	if (bound > sblimit) {
		bound = sblimit;
	}

	// Read the allocation information
	for (var sb = 0; sb < bound; sb++) {
		this.allocation[0][sb] = this.readAllocation(sb, tab3);
		this.allocation[1][sb] = this.readAllocation(sb, tab3);
	}

	for (var sb = bound; sb < sblimit; sb++) {
		this.allocation[0][sb] = 
			this.allocation[1][sb] =
			this.readAllocation(sb, tab3);
	}

	// Read scale factor selector information
	var channels = (mode === MP2.MODE.MONO) ? 1 : 2;
	for (var sb = 0;  sb < sblimit; sb++) {
		// `ch` here binds to the hoisted `var ch` declared in the loop below.
		for (ch = 0;  ch < channels; ch++) {
			if (this.allocation[ch][sb]) {
				this.scaleFactorInfo[ch][sb] = this.bits.read(2);
			}
		}
		if (mode === MP2.MODE.MONO) {
			this.scaleFactorInfo[1][sb] = this.scaleFactorInfo[0][sb];
		}
	}

	// Read scale factors
	for (var sb = 0;  sb < sblimit; sb++) {
		for (var ch = 0;  ch < channels; ch++) {
			if (this.allocation[ch][sb]) {
				var sf = this.scaleFactor[ch][sb];
				switch (this.scaleFactorInfo[ch][sb]) {
					case 0:
						sf[0] = this.bits.read(6);
						sf[1] = this.bits.read(6);
						sf[2] = this.bits.read(6);
						break;
					case 1:
						sf[0] =
						sf[1] = this.bits.read(6);
						sf[2] = this.bits.read(6);
						break;
					case 2:
						sf[0] =
						sf[1] =
						sf[2] = this.bits.read(6);
						break;
					case 3:
						sf[0] = this.bits.read(6);
						sf[1] =
						sf[2] = this.bits.read(6);
						break;
				}
			}
		}
		if (mode === MP2.MODE.MONO) {
			this.scaleFactor[1][sb][0] = this.scaleFactor[0][sb][0];
			this.scaleFactor[1][sb][1] = this.scaleFactor[0][sb][1];
			this.scaleFactor[1][sb][2] = this.scaleFactor[0][sb][2];
		}
	}

	// Coefficient input and reconstruction
	var outPos = 0;
	for (var part = 0; part < 3; part++) {
		for (var granule = 0; granule < 4; granule++) {

			// Read the samples
			for (var sb = 0; sb < bound; sb++) {
				this.readSamples(0, sb, part);
				this.readSamples(1, sb, part);
			}
			for (var sb = bound; sb < sblimit; sb++) {
				this.readSamples(0, sb, part);
				this.sample[1][sb][0] = this.sample[0][sb][0];
				this.sample[1][sb][1] = this.sample[0][sb][1];
				this.sample[1][sb][2] = this.sample[0][sb][2];
			}
			for (var sb = sblimit; sb < 32; sb++) {
				this.sample[0][sb][0] = 0;
				this.sample[0][sb][1] = 0;
				this.sample[0][sb][2] = 0;
				this.sample[1][sb][0] = 0;
				this.sample[1][sb][1] = 0;
				this.sample[1][sb][2] = 0;
			}

			// Synthesis loop
			for (var p = 0; p < 3; p++) {
				// Shifting step
				this.VPos = (this.VPos - 64) & 1023;

				for (var ch = 0;  ch < 2; ch++) {
					MP2.MatrixTransform(this.sample[ch], p, this.V, this.VPos);

					// Build U, windowing, calculate output
					this.U.fill(0);

					var dIndex = 512 - (this.VPos >> 1);
					var vIndex = (this.VPos % 128) >> 1;
					while (vIndex < 1024) {
						for (var i = 0; i < 32; ++i) {
							this.U[i] += this.D[dIndex++] * this.V[vIndex++];
						}

						vIndex += 128-32;
						dIndex += 64-32;
					}

					vIndex = (128-32 + 1024) - vIndex;
					dIndex -= (512 - 32);
					while (vIndex < 1024) {
						for (var i = 0; i < 32; ++i) {
							this.U[i] += this.D[dIndex++] * this.V[vIndex++];
						}

						vIndex += 128-32;
						dIndex += 64-32;
					}

					// Output samples
					var outChannel = ch === 0 ? left : right;
					for (var j = 0; j < 32; j++) {
						outChannel[outPos + j] = this.U[j] / 2147418112;
					}
				} // End of synthesis channel loop
				outPos += 32;
			} // End of synthesis sub-block loop

		} // Decoding of the granule finished
	}

	this.sampleRate = sampleRate;
	return frameSize;
};
279
+
280
// Read the bit-allocation code for one subband. Two-stage lookup: the
// subband selects an allocation class, then a code read from the stream
// selects the quantizer descriptor (or 0 for "no bits allocated").
MP2.prototype.readAllocation = function(sb, tab3) {
	var tab4 = MP2.QUANT_LUT_STEP_3[tab3][sb];
	var codeLength = tab4 >> 4;
	var qtab = MP2.QUANT_LUT_STEP4[tab4 & 15][this.bits.read(codeLength)];
	return qtab ? MP2.QUANT_TAB[qtab - 1] : 0;
};
285
+
286
// Read and dequantize the 3 samples of one (channel, subband) for the given
// granule part, writing the result into this.sample[ch][sb]. All math is
// integer fixed point, ported from kjmp2.
MP2.prototype.readSamples = function(ch, sb, part) {
	var q = this.allocation[ch][sb],
		sf = this.scaleFactor[ch][sb][part],
		sample = this.sample[ch][sb],
		val = 0;

	if (!q) {
		// No bits allocated for this subband
		sample[0] = sample[1] = sample[2] = 0;
		return;
	}

	// Resolve scalefactor (sf encodes exponent*3 + base-class index)
	if (sf === 63) {
		sf = 0;
	}
	else {
		var shift = (sf / 3)|0;
		sf = (MP2.SCALEFACTOR_BASE[sf % 3] + ((1 << shift) >> 1)) >> shift;
	}

	// Decode samples
	var adj = q.levels;
	if (q.group) {
		// Decode grouped samples: one code carries all 3 values in base `adj`
		val = this.bits.read(q.bits);
		sample[0] = val % adj;
		val = (val / adj)|0;
		sample[1] = val % adj;
		sample[2] = (val / adj)|0;
	}
	else {
		// Decode direct samples
		sample[0] = this.bits.read(q.bits);
		sample[1] = this.bits.read(q.bits);
		sample[2] = this.bits.read(q.bits);
	}

	// Postmultiply samples: re-center around zero, then apply the
	// scalefactor split into a 12-bit high and low part to avoid overflow.
	var scale = (65536 / (adj + 1))|0;
	adj = ((adj + 1) >> 1) - 1;

	val = (adj - sample[0]) * scale;
	sample[0] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[1]) * scale;
	sample[1] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[2]) * scale;
	sample[2] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;
};
337
+
338
+MP2.MatrixTransform = function(s, ss, d, dp) {
339
+	var t01, t02, t03, t04, t05, t06, t07, t08, t09, t10, t11, t12,
340
+		t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24,
341
+		t25, t26, t27, t28, t29, t30, t31, t32, t33;
342
+
343
+	t01 = s[ 0][ss] + s[31][ss]; t02 = (s[ 0][ss] - s[31][ss]) * 0.500602998235;
344
+	t03 = s[ 1][ss] + s[30][ss]; t04 = (s[ 1][ss] - s[30][ss]) * 0.505470959898;
345
+	t05 = s[ 2][ss] + s[29][ss]; t06 = (s[ 2][ss] - s[29][ss]) * 0.515447309923;
346
+	t07 = s[ 3][ss] + s[28][ss]; t08 = (s[ 3][ss] - s[28][ss]) * 0.53104259109;
347
+	t09 = s[ 4][ss] + s[27][ss]; t10 = (s[ 4][ss] - s[27][ss]) * 0.553103896034;
348
+	t11 = s[ 5][ss] + s[26][ss]; t12 = (s[ 5][ss] - s[26][ss]) * 0.582934968206;
349
+	t13 = s[ 6][ss] + s[25][ss]; t14 = (s[ 6][ss] - s[25][ss]) * 0.622504123036;
350
+	t15 = s[ 7][ss] + s[24][ss]; t16 = (s[ 7][ss] - s[24][ss]) * 0.674808341455;
351
+	t17 = s[ 8][ss] + s[23][ss]; t18 = (s[ 8][ss] - s[23][ss]) * 0.744536271002;
352
+	t19 = s[ 9][ss] + s[22][ss]; t20 = (s[ 9][ss] - s[22][ss]) * 0.839349645416;
353
+	t21 = s[10][ss] + s[21][ss]; t22 = (s[10][ss] - s[21][ss]) * 0.972568237862;
354
+	t23 = s[11][ss] + s[20][ss]; t24 = (s[11][ss] - s[20][ss]) * 1.16943993343;
355
+	t25 = s[12][ss] + s[19][ss]; t26 = (s[12][ss] - s[19][ss]) * 1.48416461631;
356
+	t27 = s[13][ss] + s[18][ss]; t28 = (s[13][ss] - s[18][ss]) * 2.05778100995;
357
+	t29 = s[14][ss] + s[17][ss]; t30 = (s[14][ss] - s[17][ss]) * 3.40760841847;
358
+	t31 = s[15][ss] + s[16][ss]; t32 = (s[15][ss] - s[16][ss]) * 10.1900081235;
359
+
360
+	t33 = t01 + t31; t31 = (t01 - t31) * 0.502419286188;
361
+	t01 = t03 + t29; t29 = (t03 - t29) * 0.52249861494;
362
+	t03 = t05 + t27; t27 = (t05 - t27) * 0.566944034816;
363
+	t05 = t07 + t25; t25 = (t07 - t25) * 0.64682178336;
364
+	t07 = t09 + t23; t23 = (t09 - t23) * 0.788154623451;
365
+	t09 = t11 + t21; t21 = (t11 - t21) * 1.06067768599;
366
+	t11 = t13 + t19; t19 = (t13 - t19) * 1.72244709824;
367
+	t13 = t15 + t17; t17 = (t15 - t17) * 5.10114861869;
368
+	t15 = t33 + t13; t13 = (t33 - t13) * 0.509795579104;
369
+	t33 = t01 + t11; t01 = (t01 - t11) * 0.601344886935;
370
+	t11 = t03 + t09; t09 = (t03 - t09) * 0.899976223136;
371
+	t03 = t05 + t07; t07 = (t05 - t07) * 2.56291544774;
372
+	t05 = t15 + t03; t15 = (t15 - t03) * 0.541196100146;
373
+	t03 = t33 + t11; t11 = (t33 - t11) * 1.30656296488;
374
+	t33 = t05 + t03; t05 = (t05 - t03) * 0.707106781187;
375
+	t03 = t15 + t11; t15 = (t15 - t11) * 0.707106781187;
376
+	t03 += t15;
377
+	t11 = t13 + t07; t13 = (t13 - t07) * 0.541196100146;
378
+	t07 = t01 + t09; t09 = (t01 - t09) * 1.30656296488;
379
+	t01 = t11 + t07; t07 = (t11 - t07) * 0.707106781187;
380
+	t11 = t13 + t09; t13 = (t13 - t09) * 0.707106781187;
381
+	t11 += t13; t01 += t11; 
382
+	t11 += t07; t07 += t13;
383
+	t09 = t31 + t17; t31 = (t31 - t17) * 0.509795579104;
384
+	t17 = t29 + t19; t29 = (t29 - t19) * 0.601344886935;
385
+	t19 = t27 + t21; t21 = (t27 - t21) * 0.899976223136;
386
+	t27 = t25 + t23; t23 = (t25 - t23) * 2.56291544774;
387
+	t25 = t09 + t27; t09 = (t09 - t27) * 0.541196100146;
388
+	t27 = t17 + t19; t19 = (t17 - t19) * 1.30656296488;
389
+	t17 = t25 + t27; t27 = (t25 - t27) * 0.707106781187;
390
+	t25 = t09 + t19; t19 = (t09 - t19) * 0.707106781187;
391
+	t25 += t19;
392
+	t09 = t31 + t23; t31 = (t31 - t23) * 0.541196100146;
393
+	t23 = t29 + t21; t21 = (t29 - t21) * 1.30656296488;
394
+	t29 = t09 + t23; t23 = (t09 - t23) * 0.707106781187;
395
+	t09 = t31 + t21; t31 = (t31 - t21) * 0.707106781187;
396
+	t09 += t31;	t29 += t09;	t09 += t23;	t23 += t31;
397
+	t17 += t29;	t29 += t25;	t25 += t09;	t09 += t27;
398
+	t27 += t23;	t23 += t19; t19 += t31;	
399
+	t21 = t02 + t32; t02 = (t02 - t32) * 0.502419286188;
400
+	t32 = t04 + t30; t04 = (t04 - t30) * 0.52249861494;
401
+	t30 = t06 + t28; t28 = (t06 - t28) * 0.566944034816;
402
+	t06 = t08 + t26; t08 = (t08 - t26) * 0.64682178336;
403
+	t26 = t10 + t24; t10 = (t10 - t24) * 0.788154623451;
404
+	t24 = t12 + t22; t22 = (t12 - t22) * 1.06067768599;
405
+	t12 = t14 + t20; t20 = (t14 - t20) * 1.72244709824;
406
+	t14 = t16 + t18; t16 = (t16 - t18) * 5.10114861869;
407
+	t18 = t21 + t14; t14 = (t21 - t14) * 0.509795579104;
408
+	t21 = t32 + t12; t32 = (t32 - t12) * 0.601344886935;
409
+	t12 = t30 + t24; t24 = (t30 - t24) * 0.899976223136;
410
+	t30 = t06 + t26; t26 = (t06 - t26) * 2.56291544774;
411
+	t06 = t18 + t30; t18 = (t18 - t30) * 0.541196100146;
412
+	t30 = t21 + t12; t12 = (t21 - t12) * 1.30656296488;
413
+	t21 = t06 + t30; t30 = (t06 - t30) * 0.707106781187;
414
+	t06 = t18 + t12; t12 = (t18 - t12) * 0.707106781187;
415
+	t06 += t12;
416
+	t18 = t14 + t26; t26 = (t14 - t26) * 0.541196100146;
417
+	t14 = t32 + t24; t24 = (t32 - t24) * 1.30656296488;
418
+	t32 = t18 + t14; t14 = (t18 - t14) * 0.707106781187;
419
+	t18 = t26 + t24; t24 = (t26 - t24) * 0.707106781187;
420
+	t18 += t24; t32 += t18; 
421
+	t18 += t14; t26 = t14 + t24;
422
+	t14 = t02 + t16; t02 = (t02 - t16) * 0.509795579104;
423
+	t16 = t04 + t20; t04 = (t04 - t20) * 0.601344886935;
424
+	t20 = t28 + t22; t22 = (t28 - t22) * 0.899976223136;
425
+	t28 = t08 + t10; t10 = (t08 - t10) * 2.56291544774;
426
+	t08 = t14 + t28; t14 = (t14 - t28) * 0.541196100146;
427
+	t28 = t16 + t20; t20 = (t16 - t20) * 1.30656296488;
428
+	t16 = t08 + t28; t28 = (t08 - t28) * 0.707106781187;
429
+	t08 = t14 + t20; t20 = (t14 - t20) * 0.707106781187;
430
+	t08 += t20;
431
+	t14 = t02 + t10; t02 = (t02 - t10) * 0.541196100146;
432
+	t10 = t04 + t22; t22 = (t04 - t22) * 1.30656296488;
433
+	t04 = t14 + t10; t10 = (t14 - t10) * 0.707106781187;
434
+	t14 = t02 + t22; t02 = (t02 - t22) * 0.707106781187;
435
+	t14 += t02;	t04 += t14;	t14 += t10;	t10 += t02;
436
+	t16 += t04;	t04 += t08;	t08 += t14;	t14 += t28;
437
+	t28 += t10;	t10 += t20;	t20 += t02;	t21 += t16;
438
+	t16 += t32;	t32 += t04;	t04 += t06;	t06 += t08;
439
+	t08 += t18;	t18 += t14;	t14 += t30;	t30 += t28;
440
+	t28 += t26;	t26 += t10;	t10 += t12;	t12 += t20;
441
+	t20 += t24;	t24 += t02;
442
+
443
+	d[dp + 48] = -t33;
444
+	d[dp + 49] = d[dp + 47] = -t21;
445
+	d[dp + 50] = d[dp + 46] = -t17;
446
+	d[dp + 51] = d[dp + 45] = -t16;
447
+	d[dp + 52] = d[dp + 44] = -t01;
448
+	d[dp + 53] = d[dp + 43] = -t32;
449
+	d[dp + 54] = d[dp + 42] = -t29;
450
+	d[dp + 55] = d[dp + 41] = -t04;
451
+	d[dp + 56] = d[dp + 40] = -t03;
452
+	d[dp + 57] = d[dp + 39] = -t06;
453
+	d[dp + 58] = d[dp + 38] = -t25;
454
+	d[dp + 59] = d[dp + 37] = -t08;
455
+	d[dp + 60] = d[dp + 36] = -t11;
456
+	d[dp + 61] = d[dp + 35] = -t18;
457
+	d[dp + 62] = d[dp + 34] = -t09;
458
+	d[dp + 63] = d[dp + 33] = -t14;
459
+	d[dp + 32] = -t05;
460
+	d[dp +  0] = t05; d[dp + 31] = -t30;
461
+	d[dp +  1] = t30; d[dp + 30] = -t27;
462
+	d[dp +  2] = t27; d[dp + 29] = -t28;
463
+	d[dp +  3] = t28; d[dp + 28] = -t07;
464
+	d[dp +  4] = t07; d[dp + 27] = -t26;
465
+	d[dp +  5] = t26; d[dp + 26] = -t23;
466
+	d[dp +  6] = t23; d[dp + 25] = -t10;
467
+	d[dp +  7] = t10; d[dp + 24] = -t15;
468
+	d[dp +  8] = t15; d[dp + 23] = -t12;
469
+	d[dp +  9] = t12; d[dp + 22] = -t19;
470
+	d[dp + 10] = t19; d[dp + 21] = -t20;
471
+	d[dp + 11] = t20; d[dp + 20] = -t13;
472
+	d[dp + 12] = t13; d[dp + 19] = -t24;
473
+	d[dp + 13] = t24; d[dp + 18] = -t31;
474
+	d[dp + 14] = t31; d[dp + 17] = -t02;
475
+	d[dp + 15] = t02; d[dp + 16] =  0.0;
476
+};
477
+
478
// 11-bit frame sync marker (all ones) expected at the start of every frame.
MP2.FRAME_SYNC = 0x7ff;

// MPEG audio version IDs as encoded in the frame header.
MP2.VERSION = {
	MPEG_2_5: 0x0,
	MPEG_2: 0x2,
	MPEG_1: 0x3
};

// Layer description field; note the inverted encoding (Layer I = 0x3).
MP2.LAYER = {
	III: 0x1,
	II: 0x2,
	I: 0x3
};

// Channel mode field.
MP2.MODE = {
	STEREO: 0x0,
	JOINT_STEREO: 0x1,
	DUAL_CHANNEL: 0x2,
	MONO: 0x3
};

// Sample rates in Hz, indexed by sampleRateIndex (+4 offset for MPEG-2).
MP2.SAMPLE_RATE = new Uint16Array([
	44100, 48000, 32000, 0, // MPEG-1
	22050, 24000, 16000, 0  // MPEG-2
]);

// Bit rates in kbit/s, indexed by bitrateIndex (+14 offset for MPEG-2).
MP2.BIT_RATE = new Uint16Array([
	32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, // MPEG-1
	 8, 16, 24, 32, 40, 48,  56,  64,  80,  96, 112, 128, 144, 160  // MPEG-2
]);

// Fixed-point scalefactor base values, one per 3-step exponent class;
// consumed by readSamples() when resolving scale factors.
MP2.SCALEFACTOR_BASE = new Uint32Array([
	0x02000000, 0x01965FEA, 0x01428A30
]);
512
+
513
+MP2.SYNTHESIS_WINDOW = new Float32Array([
514
+	     0.0,     -0.5,     -0.5,     -0.5,     -0.5,     -0.5,
515
+	    -0.5,     -1.0,     -1.0,     -1.0,     -1.0,     -1.5,
516
+	    -1.5,     -2.0,     -2.0,     -2.5,     -2.5,     -3.0,
517
+	    -3.5,     -3.5,     -4.0,     -4.5,     -5.0,     -5.5,
518
+	    -6.5,     -7.0,     -8.0,     -8.5,     -9.5,    -10.5,
519
+	   -12.0,    -13.0,    -14.5,    -15.5,    -17.5,    -19.0,
520
+	   -20.5,    -22.5,    -24.5,    -26.5,    -29.0,    -31.5,
521
+	   -34.0,    -36.5,    -39.5,    -42.5,    -45.5,    -48.5,
522
+	   -52.0,    -55.5,    -58.5,    -62.5,    -66.0,    -69.5,
523
+	   -73.5,    -77.0,    -80.5,    -84.5,    -88.0,    -91.5,
524
+	   -95.0,    -98.0,   -101.0,   -104.0,    106.5,    109.0,
525
+	   111.0,    112.5,    113.5,    114.0,    114.0,    113.5,
526
+	   112.0,    110.5,    107.5,    104.0,    100.0,     94.5,
527
+	    88.5,     81.5,     73.0,     63.5,     53.0,     41.5,
528
+	    28.5,     14.5,     -1.0,    -18.0,    -36.0,    -55.5,
529
+	   -76.5,    -98.5,   -122.0,   -147.0,   -173.5,   -200.5,
530
+	  -229.5,   -259.5,   -290.5,   -322.5,   -355.5,   -389.5,
531
+	  -424.0,   -459.5,   -495.5,   -532.0,   -568.5,   -605.0,
532
+	  -641.5,   -678.0,   -714.0,   -749.0,   -783.5,   -817.0,
533
+	  -849.0,   -879.5,   -908.5,   -935.0,   -959.5,   -981.0,
534
+	 -1000.5,  -1016.0,  -1028.5,  -1037.5,  -1042.5,  -1043.5,
535
+	 -1040.0,  -1031.5,   1018.5,   1000.0,    976.0,    946.5,
536
+	   911.0,    869.5,    822.0,    767.5,    707.0,    640.0,
537
+	   565.5,    485.0,    397.0,    302.5,    201.0,     92.5,
538
+	   -22.5,   -144.0,   -272.5,   -407.0,   -547.5,   -694.0,
539
+	  -846.0,  -1003.0,  -1165.0,  -1331.5,  -1502.0,  -1675.5,
540
+	 -1852.5,  -2031.5,  -2212.5,  -2394.0,  -2576.5,  -2758.5,
541
+	 -2939.5,  -3118.5,  -3294.5,  -3467.5,  -3635.5,  -3798.5,
542
+	 -3955.0,  -4104.5,  -4245.5,  -4377.5,  -4499.0,  -4609.5,
543
+	 -4708.0,  -4792.5,  -4863.5,  -4919.0,  -4958.0,  -4979.5,
544
+	 -4983.0,  -4967.5,  -4931.5,  -4875.0,  -4796.0,  -4694.5,
545
+	 -4569.5,  -4420.0,  -4246.0,  -4046.0,  -3820.0,  -3567.0,
546
+	  3287.0,   2979.5,   2644.0,   2280.5,   1888.0,   1467.5,
547
+	  1018.5,    541.0,     35.0,   -499.0,  -1061.0,  -1650.0,
548
+	 -2266.5,  -2909.0,  -3577.0,  -4270.0,  -4987.5,  -5727.5,
549
+	 -6490.0,  -7274.0,  -8077.5,  -8899.5,  -9739.0, -10594.5,
550
+	-11464.5, -12347.0, -13241.0, -14144.5, -15056.0, -15973.5,
551
+	-16895.5, -17820.0, -18744.5, -19668.0, -20588.0, -21503.0,
552
+	-22410.5, -23308.5, -24195.0, -25068.5, -25926.5, -26767.0,
553
+	-27589.0, -28389.0, -29166.5, -29919.0, -30644.5, -31342.0,
554
+	-32009.5, -32645.0, -33247.0, -33814.5, -34346.0, -34839.5,
555
+	-35295.0, -35710.0, -36084.5, -36417.5, -36707.5, -36954.0,
556
+	-37156.5, -37315.0, -37428.0, -37496.0,  37519.0,  37496.0,
557
+	 37428.0,  37315.0,  37156.5,  36954.0,  36707.5,  36417.5,
558
+	 36084.5,  35710.0,  35295.0,  34839.5,  34346.0,  33814.5,
559
+	 33247.0,  32645.0,  32009.5,  31342.0,  30644.5,  29919.0,
560
+	 29166.5,  28389.0,  27589.0,  26767.0,  25926.5,  25068.5,
561
+	 24195.0,  23308.5,  22410.5,  21503.0,  20588.0,  19668.0,
562
+	 18744.5,  17820.0,  16895.5,  15973.5,  15056.0,  14144.5,
563
+	 13241.0,  12347.0,  11464.5,  10594.5,   9739.0,   8899.5,
564
+	  8077.5,   7274.0,   6490.0,   5727.5,   4987.5,   4270.0,
565
+	  3577.0,   2909.0,   2266.5,   1650.0,   1061.0,    499.0,
566
+	   -35.0,   -541.0,  -1018.5,  -1467.5,  -1888.0,  -2280.5,
567
+	 -2644.0,  -2979.5,   3287.0,   3567.0,   3820.0,   4046.0,
568
+	  4246.0,   4420.0,   4569.5,   4694.5,   4796.0,   4875.0,
569
+	  4931.5,   4967.5,   4983.0,   4979.5,   4958.0,   4919.0,
570
+	  4863.5,   4792.5,   4708.0,   4609.5,   4499.0,   4377.5,
571
+	  4245.5,   4104.5,   3955.0,   3798.5,   3635.5,   3467.5,
572
+	  3294.5,   3118.5,   2939.5,   2758.5,   2576.5,   2394.0,
573
+	  2212.5,   2031.5,   1852.5,   1675.5,   1502.0,   1331.5,
574
+	  1165.0,   1003.0,    846.0,    694.0,    547.5,    407.0,
575
+	   272.5,    144.0,     22.5,    -92.5,   -201.0,   -302.5,
576
+	  -397.0,   -485.0,   -565.5,   -640.0,   -707.0,   -767.5,
577
+	  -822.0,   -869.5,   -911.0,   -946.5,   -976.0,  -1000.0,
578
+	  1018.5,   1031.5,   1040.0,   1043.5,   1042.5,   1037.5,
579
+	  1028.5,   1016.0,   1000.5,    981.0,    959.5,    935.0,
580
+	   908.5,    879.5,    849.0,    817.0,    783.5,    749.0,
581
+	   714.0,    678.0,    641.5,    605.0,    568.5,    532.0,
582
+	   495.5,    459.5,    424.0,    389.5,    355.5,    322.5,
583
+	   290.5,    259.5,    229.5,    200.5,    173.5,    147.0,
584
+	   122.0,     98.5,     76.5,     55.5,     36.0,     18.0,
585
+		1.0,    -14.5,    -28.5,    -41.5,    -53.0,    -63.5,
586
+	   -73.0,    -81.5,    -88.5,    -94.5,   -100.0,   -104.0,
587
+	  -107.5,   -110.5,   -112.0,   -113.5,   -114.0,   -114.0,
588
+	  -113.5,   -112.5,   -111.0,   -109.0,    106.5,    104.0,
589
+	   101.0,     98.0,     95.0,     91.5,     88.0,     84.5,
590
+	    80.5,     77.0,     73.5,     69.5,     66.0,     62.5,
591
+	    58.5,     55.5,     52.0,     48.5,     45.5,     42.5,
592
+	    39.5,     36.5,     34.0,     31.5,     29.0,     26.5,
593
+	    24.5,     22.5,     20.5,     19.0,     17.5,     15.5,
594
+	    14.5,     13.0,     12.0,     10.5,      9.5,      8.5,
595
+	     8.0,      7.0,      6.5,      5.5,      5.0,      4.5,
596
+	     4.0,      3.5,      3.5,      3.0,      2.5,      2.5,
597
+	     2.0,      2.0,      1.5,      1.5,      1.0,      1.0,
598
+	     1.0,      1.0,      0.5,      0.5,      0.5,      0.5,
599
+	     0.5,      0.5
600
+]);
601
+
602
+// Quantizer lookup, step 1: bitrate classes
603
+MP2.QUANT_LUT_STEP_1 = [
604
+ 	// 32, 48, 56, 64, 80, 96,112,128,160,192,224,256,320,384 <- bitrate
605
+	[   0,  0,  1,  1,  1,  2,  2,  2,  2,  2,  2,  2,  2,  2], // mono
606
+	// 16, 24, 28, 32, 40, 48, 56, 64, 80, 96,112,128,160,192 <- bitrate / chan
607
+	[   0,  0,  0,  0,  0,  0,  1,  1,  1,  2,  2,  2,  2,  2] // stereo
608
+];
609
+
610
+// Quantizer lookup, step 2: bitrate class, sample rate -> B2 table idx, sblimit
611
+MP2.QUANT_TAB = {
612
+	A: (27 | 64), // Table 3-B.2a: high-rate, sblimit = 27
613
+	B: (30 | 64), // Table 3-B.2b: high-rate, sblimit = 30
614
+	C:   8,       // Table 3-B.2c:  low-rate, sblimit =  8
615
+	D:  12        // Table 3-B.2d:  low-rate, sblimit = 12
616
+};
617
+
618
+MP2.QUANT_LUT_STEP_2 = [
619
+	//   44.1 kHz,        48 kHz,          32 kHz
620
+	[MP2.QUANT_TAB.C, MP2.QUANT_TAB.C, MP2.QUANT_TAB.D], // 32 - 48 kbit/sec/ch
621
+	[MP2.QUANT_TAB.A, MP2.QUANT_TAB.A, MP2.QUANT_TAB.A], // 56 - 80 kbit/sec/ch
622
+	[MP2.QUANT_TAB.B, MP2.QUANT_TAB.A, MP2.QUANT_TAB.B]  // 96+	 kbit/sec/ch
623
+];
624
+
625
+// Quantizer lookup, step 3: B2 table, subband -> nbal, row index
626
+// (upper 4 bits: nbal, lower 4 bits: row index)
627
+MP2.QUANT_LUT_STEP_3 = [
628
+	// Low-rate table (3-B.2c and 3-B.2d)
629
+	[
630
+		0x44,0x44,
631
+	  	0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34
632
+	],
633
+	// High-rate table (3-B.2a and 3-B.2b)
634
+	[
635
+		0x43,0x43,0x43,
636
+		0x42,0x42,0x42,0x42,0x42,0x42,0x42,0x42,
637
+		0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,
638
+		0x20,0x20,0x20,0x20,0x20,0x20,0x20
639
+	],
640
+	// MPEG-2 LSR table (B.2 in ISO 13818-3)
641
+	[
642
+		0x45,0x45,0x45,0x45,
643
+		0x34,0x34,0x34,0x34,0x34,0x34,0x34,
644
+		0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,
645
+					   0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24	
646
+	]
647
+];
648
+
649
+// Quantizer lookup, step 4: table row, allocation[] value -> quant table index
650
+MP2.QUANT_LUT_STEP4 = [
651
+	[0, 1, 2, 17],
652
+	[0, 1, 2, 3, 4, 5, 6, 17],
653
+	[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 17],
654
+	[0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
655
+	[0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17],
656
+	[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
657
+];
658
+
659
+MP2.QUANT_TAB = [
660
+	{levels:     3, group: 1, bits:  5},  //  1
661
+	{levels:     5, group: 1, bits:  7},  //  2
662
+	{levels:     7, group: 0, bits:  3},  //  3
663
+	{levels:     9, group: 1, bits: 10},  //  4
664
+	{levels:    15, group: 0, bits:  4},  //  5
665
+	{levels:    31, group: 0, bits:  5},  //  6
666
+	{levels:    63, group: 0, bits:  6},  //  7
667
+	{levels:   127, group: 0, bits:  7},  //  8
668
+	{levels:   255, group: 0, bits:  8},  //  9
669
+	{levels:   511, group: 0, bits:  9},  // 10
670
+	{levels:  1023, group: 0, bits: 10},  // 11
671
+	{levels:  2047, group: 0, bits: 11},  // 12
672
+	{levels:  4095, group: 0, bits: 12},  // 13
673
+	{levels:  8191, group: 0, bits: 13},  // 14
674
+	{levels: 16383, group: 0, bits: 14},  // 15
675
+	{levels: 32767, group: 0, bits: 15},  // 16
676
+	{levels: 65535, group: 0, bits: 16}   // 17
677
+];
678
+
679
+return MP2;
680
+
681
+})();
682
+

La diferencia del archivo ha sido suprimido porque es demasiado grande
+ 1674 - 0
src/mpeg1.js


+ 243 - 0
src/player.js Ver fichero

@@ -0,0 +1,243 @@
1
+JSMpeg.Player = (function(){ "use strict";
2
+
3
+var Player = function(url, options) {
4
+	this.options = options || {};
5
+
6
+	if (url.match(/^wss?:\/\//)) {
7
+		this.source = new JSMpeg.Source.WebSocket(url, options);
8
+		options.streaming = true;
9
+	}
10
+	else if (options.progressive !== false) {
11
+		this.source = new JSMpeg.Source.AjaxProgressive(url, options);
12
+		options.streaming = false;
13
+	}
14
+	else {
15
+		this.source = new JSMpeg.Source.Ajax(url, options);
16
+		options.streaming = false;
17
+	}
18
+
19
+	this.maxAudioLag = options.maxAudioLag || 0.25;
20
+	this.loop = options.loop !== false;
21
+	this.autoplay = !!options.autoplay || options.streaming;
22
+
23
+	this.demuxer = new JSMpeg.Demuxer.TS(options);
24
+	this.source.connect(this.demuxer);
25
+
26
+	if (options.video !== false) {
27
+		this.video = new JSMpeg.Decoder.MPEG1Video(options);
28
+		this.renderer = !options.disableGl && JSMpeg.Renderer.WebGL.IsSupported()
29
+			? new JSMpeg.Renderer.WebGL(options)
30
+			: new JSMpeg.Renderer.Canvas2D(options);
31
+		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.VIDEO_1, this.video);
32
+		this.video.connect(this.renderer);
33
+	}
34
+
35
+	if (options.audio !== false && JSMpeg.AudioOutput.WebAudio.IsSupported()) {
36
+		this.audio = new JSMpeg.Decoder.MP2Audio(options);
37
+		this.audioOut = new JSMpeg.AudioOutput.WebAudio(options);
38
+		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.AUDIO_1, this.audio);
39
+		this.audio.connect(this.audioOut);
40
+	}
41
+
42
+	Object.defineProperty(this, 'currentTime', {
43
+		get: this.getCurrentTime,
44
+		set: this.setCurrentTime
45
+	});
46
+	Object.defineProperty(this, 'volume', {
47
+		get: this.getVolume,
48
+		set: this.setVolume
49
+	});
50
+
51
+	this.unpauseOnShow = false;
52
+	if (options.pauseWhenHidden !== false) {
53
+		document.addEventListener('visibilitychange', this.showHide.bind(this));
54
+	}
55
+
56
+	this.source.start();
57
+
58
+	if (this.autoplay) {
59
+		this.play();
60
+	}
61
+};
62
+
63
+Player.prototype.showHide = function(ev) {
64
+	if (document.visibilityState === 'hidden') {
65
+		this.unpauseOnShow = this.wantsToPlay;
66
+		this.pause();
67
+	}
68
+	else if (this.unpauseOnShow) {
69
+		this.play();
70
+	}
71
+};
72
+
73
+Player.prototype.play = function(ev) {
74
+	this.animationId = requestAnimationFrame(this.update.bind(this));
75
+	this.wantsToPlay = true;
76
+};
77
+
78
+Player.prototype.pause = function(ev) {
79
+	cancelAnimationFrame(this.animationId);
80
+	this.wantsToPlay = false;
81
+	this.isPlaying = false;
82
+
83
+	if (this.audio && this.audio.canPlay) {
84
+		// Seek to the currentTime again - audio may already be enqueued a bit
85
+		// further, so we have to rewind it.
86
+		this.audioOut.stop();
87
+		this.seek(this.currentTime);
88
+	}
89
+};
90
+
91
+Player.prototype.getVolume = function() {
92
+	return this.audioOut ? this.audioOut.volume : 0;
93
+};
94
+
95
+Player.prototype.setVolume = function(volume) {
96
+	if (this.audioOut) {
97
+		this.audioOut.volume = volume;
98
+	}
99
+};
100
+
101
+Player.prototype.stop = function(ev) {
102
+	this.pause();
103
+	this.seek(0);
104
+	if (this.video && this.options.decodeFirstFrame !== false) {
105
+		this.video.decode();
106
+	}
107
+};
108
+
109
+Player.prototype.seek = function(time) {
110
+	var startOffset = this.audio && this.audio.canPlay
111
+		? this.audio.startTime
112
+		: this.video.startTime;
113
+
114
+	if (this.video) {
115
+		this.video.seek(time + startOffset);
116
+	}
117
+	if (this.audio) {
118
+		this.audio.seek(time + startOffset);
119
+	}
120
+
121
+	this.startTime = JSMpeg.Now() - time;
122
+};
123
+
124
+Player.prototype.getCurrentTime = function() {
125
+	return this.audio && this.audio.canPlay
126
+		? this.audio.currentTime - this.audio.startTime
127
+		: this.video.currentTime - this.video.startTime;
128
+};
129
+
130
+Player.prototype.setCurrentTime = function(time) {
131
+	this.seek(time);
132
+};
133
+
134
+Player.prototype.update = function() {
135
+	this.animationId = requestAnimationFrame(this.update.bind(this));
136
+
137
+	if (!this.source.established) {
138
+		if (this.renderer) {
139
+			this.renderer.renderProgress(this.source.progress);
140
+		}
141
+		return;
142
+	}
143
+
144
+	if (!this.isPlaying) {
145
+		this.isPlaying = true;
146
+		this.startTime = JSMpeg.Now() - this.currentTime;
147
+	}
148
+
149
+	if (this.options.streaming) {
150
+		this.updateForStreaming();
151
+	}
152
+	else {
153
+		this.updateForStaticFile();
154
+	}
155
+};
156
+
157
+Player.prototype.updateForStreaming = function() {
158
+	// When streaming, immediately decode everything we have buffered up until
159
+	// now to minimize playback latency.
160
+
161
+	if (this.video) {
162
+		this.video.decode();
163
+	}
164
+
165
+	if (this.audio) {
166
+		var decoded = false;
167
+		do {
168
+			// If there's a lot of audio enqueued already, disable output and
169
+			// catch up with the encoding.
170
+			if (this.audioOut.enqueuedTime > this.maxAudioLag) {
171
+				this.audioOut.resetEnqueuedTime();
172
+				this.audioOut.enabled = false;
173
+			}
174
+			decoded = this.audio.decode();		
175
+		} while (decoded);
176
+		this.audioOut.enabled = true;
177
+	}
178
+};
179
+
180
+Player.prototype.updateForStaticFile = function() {
181
+	var notEnoughData = false,
182
+		headroom = 0;
183
+
184
+	// If we have an audio track, we always try to sync the video to the audio.
185
+	// Gaps and discontinuities are far more percetable in audio than in video.
186
+
187
+	if (this.audio && this.audio.canPlay) {
188
+		// Do we have to decode and enqueue some more audio data?
189
+		while (
190
+			!notEnoughData && 
191
+			this.audio.decodedTime - this.audio.currentTime < 0.25
192
+		) {
193
+			notEnoughData = !this.audio.decode();
194
+		}
195
+
196
+		// Sync video to audio
197
+		if (this.video && this.video.currentTime < this.audio.currentTime) {
198
+			notEnoughData = !this.video.decode();
199
+		}
200
+
201
+		headroom = this.demuxer.currentTime - this.audio.currentTime;
202
+	}
203
+
204
+
205
+	else if (this.video) {
206
+		// Video only - sync it to player's wallclock
207
+		var targetTime = (JSMpeg.Now() - this.startTime) + this.video.startTime,
208
+			lateTime = targetTime - this.video.currentTime,
209
+			frameTime = 1/this.video.frameRate;
210
+
211
+		if (this.video && lateTime > 0) {
212
+			// If the video is too far behind (>2 frames), simply reset the
213
+			// target time to the next frame instead of trying to catch up.
214
+			if (lateTime > frameTime * 2) {
215
+				this.startTime += lateTime;
216
+			}
217
+
218
+			notEnoughData = !this.video.decode();
219
+		}
220
+
221
+		headroom = this.demuxer.currentTime - targetTime;
222
+	}
223
+
224
+	// Notify the source of the playhead headroom, so it can decide whether to
225
+	// continue loading further data.
226
+	this.source.resume(headroom);
227
+
228
+	// If we failed to decode and the source is complete, it means we reached
229
+	// the end of our data. We may want to loop.
230
+	if (notEnoughData && this.source.completed) {
231
+		if (this.loop) {
232
+			this.seek(0);
233
+		}
234
+		else {
235
+			this.pause();
236
+		}
237
+	}
238
+};
239
+
240
+return Player;
241
+
242
+})();
243
+

+ 190 - 0
src/ts.js Ver fichero

@@ -0,0 +1,190 @@
1
+JSMpeg.Demuxer.TS = (function(){ "use strict";
2
+
3
+var TS = function(options) {
4
+	this.bits = null;
5
+	this.leftoverBytes = null;
6
+
7
+	this.guessVideoFrameEnd = true;
8
+	this.pidsToStreamIds = {};
9
+
10
+	this.pesPacketInfo = {};
11
+	this.startTime = 0;
12
+	this.currentTime = 0;
13
+};
14
+
15
+TS.prototype.connect = function(streamId, destination) {
16
+	this.pesPacketInfo[streamId] = {
17
+		destination: destination,
18
+		currentLength: 0,
19
+		totalLength: 0,
20
+		pts: 0,
21
+		buffers: []
22
+	};
23
+};
24
+
25
+TS.prototype.write = function(buffer) {
26
+	if (this.leftoverBytes) {
27
+		var totalLength = buffer.byteLength + this.leftoverBytes.byteLength;
28
+		this.bits = new JSMpeg.BitBuffer(totalLength);
29
+		this.bits.write([this.leftoverBytes, buffer]);
30
+	}
31
+	else {
32
+		this.bits = new JSMpeg.BitBuffer(buffer);
33
+	}
34
+
35
+	while (this.bits.has(188 << 3)) {
36
+		this.parsePacket();
37
+	}
38
+
39
+	var leftoverCount = this.bits.byteLength - (this.bits.index >> 3);
40
+	this.leftoverBytes = leftoverCount > 0
41
+		? this.bits.bytes.subarray(this.bits.index >> 3)
42
+		: null;
43
+};
44
+
45
+TS.prototype.parsePacket = function() {
46
+	var end = (this.bits.index >> 3) + 188;
47
+
48
+	if (this.bits.read(8) !== 0x47) {
49
+		throw("Sync Token not found");
50
+	}
51
+
52
+	var transportError = this.bits.read(1),
53
+		payloadStart = this.bits.read(1),
54
+		transportPriority = this.bits.read(1),
55
+		pid = this.bits.read(13),
56
+		transportScrambling = this.bits.read(2),
57
+		adaptationField = this.bits.read(2),
58
+		continuityCounter = this.bits.read(4);
59
+
60
+
61
+	// If this is the start of a new payload; signal the end of the previous
62
+	// frame, if we didn't do so already.
63
+	var streamId = this.pidsToStreamIds[pid];
64
+	if (payloadStart && streamId) {
65
+		var pi = this.pesPacketInfo[streamId];
66
+		if (pi && pi.currentLength) {
67
+			this.packetComplete(pi);
68
+		}
69
+	}
70
+
71
+	// Extract current payload
72
+	if (adaptationField & 0x1) {
73
+		if ((adaptationField & 0x2)) {
74
+			var adaptationFieldLength = this.bits.read(8);
75
+			this.bits.skip(adaptationFieldLength << 3);
76
+		}
77
+
78
+		if (payloadStart && this.bits.nextBytesAreStartCode()) {
79
+			this.bits.skip(24);
80
+			streamId = this.bits.read(8);
81
+			this.pidsToStreamIds[pid] = streamId;
82
+
83
+			var packetLength = this.bits.read(16)
84
+			this.bits.skip(8);
85
+			var ptsDtsFlag = this.bits.read(2);
86
+			this.bits.skip(6);
87
+			var headerLength = this.bits.read(8);
88
+			var payloadBeginIndex = this.bits.index + (headerLength << 3);
89
+			
90
+			var pi = this.pesPacketInfo[streamId];
91
+			if (pi) {
92
+				var pts = 0;
93
+				if (ptsDtsFlag & 0x2) {
94
+					// The Presentation Timestamp is encoded as 33(!) bit
95
+					// integer, but has a "marker bit" inserted at weird places
96
+					// in between, making the whole thing 5 bytes in size.
97
+					// You can't make this shit up...
98
+					this.bits.skip(4);
99
+					var p32_30 = this.bits.read(3);
100
+					this.bits.skip(1);
101
+					var p29_15 = this.bits.read(15);
102
+					this.bits.skip(1);
103
+					var p14_0 = this.bits.read(15);
104
+					this.bits.skip(1);
105
+
106
+					// Can't use bit shifts here; we need 33 bits of precision,
107
+					// so we're using JavaScript's double number type. Also
108
+					// devide by the 90khz clock to get the pts in seconds
109
+					pts = (p32_30 * 1073741824 + p29_15 * 32768 + p14_0)/90000;
110
+					
111
+					this.currentTime = pts;
112
+					if (this.startTime === -1) {
113
+						this.startTime = pts;
114
+					}
115
+				}
116
+
117
+				var payloadLength = packetLength 
118
+					? packetLength - headerLength - 3
119
+					: 0;
120
+				this.packetStart(pi, pts, payloadLength);
121
+			}
122
+
123
+			// Skip the rest of the header without parsing it
124
+			this.bits.index = payloadBeginIndex;
125
+		}
126
+
127
+		if (streamId) {
128
+			// Attempt to detect if the PES packet is complete. For Audio (and
129
+			// other) packets, we received a total packet length with the PES 
130
+			// header, so we can check the current length.
131
+
132
+			// For Video packets, we have to guess the end by detecting if this
133
+			// TS packet was padded - there's no good reason to pad a TS packet 
134
+			// in between, but it might just fit exactly. If this fails, we can
135
+			// only wait for the next PES header for that stream.
136
+
137
+			var pi = this.pesPacketInfo[streamId];
138
+			if (pi) {
139
+				var start = this.bits.index >> 3;
140
+				var complete = this.packetAddData(pi, start, end);
141
+
142
+				var hasPadding = !payloadStart && (adaptationField & 0x2);
143
+				if (complete || (this.guessVideoFrameEnd && hasPadding)) {
144
+					this.packetComplete(pi);	
145
+				}
146
+			}
147
+		}
148
+	}
149
+
150
+	this.bits.index = end << 3;
151
+};
152
+
153
+TS.prototype.packetStart = function(pi, pts, payloadLength) {
154
+	pi.totalLength = payloadLength;
155
+	pi.currentLength = 0;
156
+	pi.pts = pts;
157
+};
158
+
159
+TS.prototype.packetAddData = function(pi, start, end) {
160
+	pi.buffers.push(this.bits.bytes.subarray(start, end));
161
+	pi.currentLength += end - start;
162
+
163
+	var complete = (pi.totalLength !== 0 && pi.currentLength >= pi.totalLength);
164
+	return complete;
165
+};
166
+
167
+TS.prototype.packetComplete = function(pi) {
168
+	pi.destination.write(pi.pts, pi.buffers);
169
+	pi.totalLength = 0;
170
+	pi.currentLength = 0;
171
+	pi.buffers = [];
172
+};
173
+
174
+TS.STREAM = {
175
+	PACK_HEADER: 0xBA,
176
+	SYSTEM_HEADER: 0xBB,
177
+	PROGRAM_MAP: 0xBC,
178
+	PRIVATE_1: 0xBD,
179
+	PADDING: 0xBE,
180
+	PRIVATE_2: 0xBF,
181
+	AUDIO_1: 0xC0,
182
+	VIDEO_1: 0xE0,
183
+	DIRECTORY: 0xFF
184
+};
185
+
186
+return TS;
187
+
188
+})();
189
+
190
+

+ 161 - 0
src/video-element.js Ver fichero

@@ -0,0 +1,161 @@
1
+JSMpeg.VideoElement = (function(){ "use strict";
2
+
3
+var VideoElement = function(element) {	
4
+	var url = element.dataset.url;
5
+
6
+	if (!url) {
7
+		throw ("VideoElement has no `data-url` attribute");
8
+	}
9
+
10
+	// Setup the div container, canvas and play button
11
+	var addStyles = function(element, styles) {
12
+		for (var name in styles) {
13
+			element.style[name] = styles[name];
14
+		}
15
+	};
16
+
17
+	this.container = element;
18
+	addStyles(this.container, {
19
+		display: 'inline-block',
20
+		position: 'relative',
21
+		minWidth: '80px', minHeight: '80px'
22
+	});
23
+
24
+	this.canvas = document.createElement('canvas');
25
+	this.canvas.width = 960;
26
+	this.canvas.height = 540;
27
+	addStyles(this.canvas, {
28
+		display: 'block',
29
+		width: '100%'
30
+	});
31
+	this.container.appendChild(this.canvas);
32
+
33
+	this.playButton = document.createElement('div');
34
+	this.playButton.innerHTML = VideoElement.PLAY_BUTTON;
35
+	addStyles(this.playButton, {
36
+		zIndex: 2, position: 'absolute',
37
+		top: '0', bottom: '0', left: '0', right: '0',
38
+		maxWidth: '75px', maxHeight: '75px',
39
+		margin: 'auto',
40
+		opacity: '0.7',
41
+		cursor: 'pointer'
42
+	});
43
+	this.container.appendChild(this.playButton);	
44
+
45
+	// Parse the data-options - we try to decode the values as json. This way
46
+	// we can get proper boolean and number values. If JSON.parse() fails,
47
+	// treat it as a string.
48
+	var options = {canvas: this.canvas};
49
+	for (var option in element.dataset) {
50
+		try {
51
+			options[option] = JSON.parse(element.dataset[option]);
52
+		}
53
+		catch(err) {
54
+			options[option] = element.dataset[option];
55
+		}
56
+	}
57
+
58
+	// Create the player instance
59
+	this.player = new JSMpeg.Player(url, options);
60
+
61
+	// Setup the poster element, if any
62
+	if (options.poster && !options.autoplay && !this.player.options.streaming) {
63
+		options.decodeFirstFrame = false;
64
+		this.poster = new Image();
65
+		this.poster.src = options.poster;
66
+		this.poster.addEventListener('load', this.posterLoaded)
67
+		addStyles(this.poster, {
68
+			display: 'block', zIndex: 1, position: 'absolute',
69
+			top: 0, left: 0, bottom: 0, right: 0
70
+		});
71
+		this.container.appendChild(this.poster);
72
+	}
73
+
74
+	// Add the click handler if this video is pausable
75
+	if (!this.player.options.streaming) {
76
+		this.container.addEventListener('click', this.onClick.bind(this));
77
+	}
78
+
79
+	// Hide the play button if this video immediately begins playing
80
+	if (options.autoplay || this.player.options.streaming) {
81
+		this.playButton.style.display = 'none';
82
+	}
83
+
84
+	// Set up the unlock audio buton for iOS devices. iOS only allows us to 
85
+	// play audio after a user action has initiated playing. For autoplay or 
86
+	// streaming players we set up a muted speaker icon as the button. For all
87
+	// others, we can simply use the play button.
88
+	if (this.player.audioOut && !this.player.audioOut.unlocked) {
89
+		var unlockAudioElement = this.container;
90
+
91
+		if (options.autoplay || this.player.options.streaming) {
92
+			this.unmuteButton = document.createElement('div');
93
+			this.unmuteButton.innerHTML = VideoElement.UNMUTE_BUTTON;
94
+			addStyles(this.unmuteButton, {
95
+				zIndex: 2, position: 'absolute',
96
+				bottom: '10px', right: '20px',
97
+				width: '75px', height: '75px',
98
+				margin: 'auto',
99
+				opacity: '0.7',
100
+				cursor: 'pointer'				
101
+			});
102
+			this.container.appendChild(this.unmuteButton);
103
+			unlockAudioElement = this.unmuteButton;
104
+		}
105
+
106
+		this.unlockAudioBound = this.onUnlockAudio.bind(this, unlockAudioElement);
107
+		unlockAudioElement.addEventListener('touchstart', this.unlockAudioBound, false);
108
+		unlockAudioElement.addEventListener('click', this.unlockAudioBound, true);
109
+	}
110
+};
111
+
112
+VideoElement.prototype.onUnlockAudio = function(element, ev) {
113
+	if (this.unmuteButton) {
114
+		ev.preventDefault();
115
+		ev.stopPropagation();
116
+	}
117
+	this.player.audioOut.unlock(function(){
118
+		if (this.unmuteButton) {
119
+			this.unmuteButton.style.display = 'none';
120
+		}
121
+		element.removeEventListener('touchstart', this.unlockAudioBound);
122
+		element.removeEventListener('click', this.unlockAudioBound);
123
+	}.bind(this));
124
+};
125
+
126
+VideoElement.prototype.onClick = function(ev) {
127
+	if (this.player.isPlaying) {
128
+		this.player.pause();
129
+		this.playButton.style.display = 'block';
130
+	}
131
+	else {
132
+		this.player.play();
133
+		this.playButton.style.display = 'none';
134
+		if (this.poster) {
135
+			this.poster.style.display = 'none';
136
+		}
137
+	}
138
+};
139
+
140
+VideoElement.PLAY_BUTTON = 
141
+	'<svg style="max-width: 75px; max-height: 75px;" ' +
142
+		'viewBox="0 0 200 200" alt="Play video">' +
143
+		'<circle cx="100" cy="100" r="90" fill="none" '+
144
+			'stroke-width="15" stroke="#fff"/>' +
145
+		'<polygon points="70, 55 70, 145 145, 100" fill="#fff"/>' +
146
+	'</svg>';
147
+
148
+VideoElement.UNMUTE_BUTTON = 
149
+	'<svg style="max-width: 75px; max-height: 75px;" viewBox="0 0 75 75">' +
150
+		'<polygon class="audio-speaker" stroke="none" fill="#fff" '+
151
+			'points="39,13 22,28 6,28 6,47 21,47 39,62 39,13"/>' +
152
+		'<g stroke="#fff" stroke-width="5">' +
153
+			'<path d="M 49,50 69,26"/>' +
154
+			'<path d="M 69,50 49,26"/>' +
155
+		'</g>' +
156
+	'</svg>';
157
+
158
+return VideoElement;
159
+
160
+})();
161
+

+ 131 - 0
src/webaudio.js Ver fichero

@@ -0,0 +1,131 @@
1
+JSMpeg.AudioOutput.WebAudio = (function() { "use strict";
2
+
3
+var WebAudioOut = function(options) {
4
+	this.context = WebAudioOut.CachedContext =
5
+		WebAudioOut.CachedContext ||
6
+		new (window.AudioContext || window.webkitAudioContext)();
7
+
8
+	this.gain = this.context.createGain();
9
+	this.gain.connect(this.context.destination);
10
+	this.destination = this.gain;
11
+
12
+	this.startTime = 0;
13
+	this.buffer = null;
14
+	this.wallclockStartTime = 0;
15
+	this.volume = 1;
16
+	this.enabled = true;
17
+
18
+	this.unlocked = !WebAudioOut.NeedsUnlocking();
19
+	
20
+	Object.defineProperty(this, 'enqueuedTime', {get: this.getEnqueuedTime});
21
+};
22
+
23
+WebAudioOut.prototype.play = function(sampleRate, left, right) {
24
+	if (!this.enabled) {
25
+		return;
26
+	}
27
+
28
+	// If the context is not unlocked yet, we simply advance the start time
29
+	// to "fake" actually playing audio. This will keep the video in sync.
30
+	if (!this.unlocked) {
31
+		var ts = JSMpeg.Now()
32
+		if (this.wallclockStartTime < ts) {
33
+			this.wallclockStartTime = ts;
34
+		}
35
+		this.wallclockStartTime += left.length / sampleRate;
36
+		return;
37
+	}
38
+
39
+
40
+	this.gain.gain.value = this.volume;
41
+
42
+	var buffer = this.context.createBuffer(2, left.length, sampleRate);
43
+	buffer.getChannelData(0).set(left);
44
+	buffer.getChannelData(1).set(right);
45
+
46
+	var source = this.context.createBufferSource();
47
+	source.buffer = buffer;
48
+	source.connect(this.destination);
49
+
50
+	var now = this.context.currentTime;
51
+	var duration = buffer.duration;
52
+	if (this.startTime < now) {
53
+		this.startTime = now;
54
+		this.wallclockStartTime = JSMpeg.Now();
55
+	}
56
+
57
+	source.start(this.startTime);
58
+	this.startTime += duration;
59
+	this.wallclockStartTime += duration;
60
+};
61
+
62
+WebAudioOut.prototype.stop = function() {
63
+	// Meh; there seems to be no simple way to get a list of currently
64
+	// active source nodes from the Audio Context, and maintaining this
65
+	// list ourselfs would be a pain, so we just set the gain to 0
66
+	// to cut off all enqueued audio instantly.
67
+	this.gain.gain.value = 0;
68
+};
69
+
70
+WebAudioOut.prototype.getEnqueuedTime = function() {
71
+	// The AudioContext.currentTime is only updated every so often, so if we
72
+	// want to get exact timing, we need to rely on the system time.
73
+	return Math.max(this.wallclockStartTime - JSMpeg.Now(), 0)
74
+};
75
+
76
+WebAudioOut.prototype.resetEnqueuedTime = function() {
77
+	this.startTime = this.context.currentTime;
78
+	this.wallclockStartTime = JSMpeg.Now();
79
+};
80
+
81
+WebAudioOut.prototype.unlock = function(callback) {
82
+	if (this.unlocked) {
83
+		if (callback) {
84
+			callback();
85
+		}
86
+		return;
87
+	}
88
+
89
+	this.unlockCallback = callback;
90
+	
91
+	// Create empty buffer and play it
92
+	var buffer = this.context.createBuffer(1, 1, 22050);
93
+	var source = this.context.createBufferSource();
94
+	source.buffer = buffer;
95
+	source.connect(this.destination);
96
+	source.start(0);
97
+
98
+	setTimeout(this.checkIfUnlocked.bind(this, source, 0), 0);
99
+};
100
+
101
+WebAudioOut.prototype.checkIfUnlocked = function(source, attempt) {
102
+	if (
103
+		source.playbackState === source.PLAYING_STATE || 
104
+		source.playbackState === source.FINISHED_STATE
105
+	) {
106
+		this.unlocked = true;
107
+		if (this.unlockCallback) {
108
+			this.unlockCallback();
109
+			this.unlockCallback = null;	
110
+		}
111
+	}
112
+	else if (attempt < 10) {
113
+		// Jeez, what a shit show. Thanks iOS!
114
+		setTimeout(this.checkIfUnlocked.bind(this, source, attempt+1), 100);
115
+	}
116
+};
117
+
118
+WebAudioOut.NeedsUnlocking = function() {
119
+	return /iPhone|iPad|iPod/i.test(navigator.userAgent);
120
+};
121
+
122
+WebAudioOut.IsSupported = function() {
123
+	return (window.AudioContext || window.webkitAudioContext);
124
+};
125
+
126
+WebAudioOut.CachedContext = null;
127
+
128
+return WebAudioOut;
129
+
130
+})();
131
+

+ 230 - 0
src/webgl.js Ver fichero

@@ -0,0 +1,230 @@
1
+JSMpeg.Renderer.WebGL = (function(){ "use strict";
2
+
3
+var WebGLRenderer = function(options) {
4
+	this.canvas = options.canvas || document.createElement('canvas');
5
+	this.width = this.canvas.width;
6
+	this.height = this.canvas.height;
7
+	this.enabled = true;
8
+
9
+	var contextCreateOptions = {
10
+		preserveDrawingBuffer: !!options.preserveDrawingBuffer,
11
+		alpha: false,
12
+		depth: false,
13
+		stencil: false,
14
+		antialias: false
15
+	};
16
+
17
+	this.gl = 
18
+		this.canvas.getContext('webgl', contextCreateOptions) || 
19
+		this.canvas.getContext('experimental-webgl', contextCreateOptions);
20
+
21
+	if (!this.gl) {
22
+		throw new Error('Failed to get WebGL Context');
23
+	}
24
+
25
+	var gl = this.gl;
26
+	var vertexAttr = null;
27
+
28
+	// Init buffers
29
+	var vertexBuffer = gl.createBuffer();
30
+	var vertexCoords = new Float32Array([0, 0, 0, 1, 1, 0, 1, 1]);
31
+	gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
32
+	gl.bufferData(gl.ARRAY_BUFFER, vertexCoords, gl.STATIC_DRAW);
33
+
34
+	// Setup the main YCrCbToRGBA shader
35
+	this.program = this.createProgram(
36
+		WebGLRenderer.SHADER.VERTEX_IDENTITY,
37
+		WebGLRenderer.SHADER.FRAGMENT_YCRCB_TO_RGBA
38
+	);
39
+	vertexAttr = gl.getAttribLocation(this.program, 'vertex');
40
+	gl.enableVertexAttribArray(vertexAttr);
41
+	gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0);
42
+
43
+	this.textureY = this.createTexture(0, 'textureY');
44
+	this.textureCb = this.createTexture(1, 'textureCb');
45
+	this.textureCr = this.createTexture(2, 'textureCr');
46
+
47
+
48
+	// Setup the loading animation shader
49
+	this.loadingProgram = this.createProgram(
50
+		WebGLRenderer.SHADER.VERTEX_IDENTITY,
51
+		WebGLRenderer.SHADER.FRAGMENT_LOADING
52
+	);
53
+	vertexAttr = gl.getAttribLocation(this.loadingProgram, 'vertex');
54
+	gl.enableVertexAttribArray(vertexAttr);
55
+	gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0);
56
+
57
+	this.shouldCreateUnclampedViews = !this.allowsClampedTextureData();
58
+};
59
+
60
+WebGLRenderer.prototype.resize = function(width, height) {
61
+	this.width = width|0;
62
+	this.height = height|0;
63
+
64
+	this.canvas.width = this.width;
65
+	this.canvas.height = this.height;
66
+
67
+	this.gl.useProgram(this.program);
68
+	this.gl.viewport(0, 0, this.width, this.height);
69
+};
70
+
71
+WebGLRenderer.prototype.createTexture = function(index, name) {
72
+	var gl = this.gl;
73
+	var texture = gl.createTexture();
74
+
75
+	gl.bindTexture(gl.TEXTURE_2D, texture);
76
+	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
77
+	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
78
+	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
79
+	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
80
+	gl.uniform1i(gl.getUniformLocation(this.program, name), index);
81
+
82
+	return texture;
83
+};
84
+
85
+WebGLRenderer.prototype.createProgram = function(vsh, fsh) {
86
+	var gl = this.gl;
87
+	var program = gl.createProgram();
88
+
89
+	gl.attachShader(program, this.compileShader(gl.VERTEX_SHADER, vsh));
90
+	gl.attachShader(program, this.compileShader(gl.FRAGMENT_SHADER, fsh));
91
+	gl.linkProgram(program);
92
+	gl.useProgram(program);
93
+
94
+	return program;
95
+};
96
+
97
+WebGLRenderer.prototype.compileShader = function(type, source) {
98
+	var gl = this.gl;
99
+	var shader = gl.createShader(type);
100
+	gl.shaderSource(shader, source);
101
+	gl.compileShader(shader);
102
+
103
+	if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
104
+		throw new Error(gl.getShaderInfoLog(shader));
105
+	}
106
+
107
+	return shader;
108
+};
109
+
110
+WebGLRenderer.prototype.allowsClampedTextureData = function() {
111
+	var gl = this.gl;
112
+	var texture = gl.createTexture();
113
+
114
+	gl.bindTexture(gl.TEXTURE_2D, texture);
115
+	gl.texImage2D(
116
+		gl.TEXTURE_2D, 0, gl.LUMINANCE, 1, 1, 0,
117
+		gl.LUMINANCE, gl.UNSIGNED_BYTE, new Uint8ClampedArray([0])
118
+	);
119
+	return (gl.getError() === 0);
120
+};
121
+
122
+WebGLRenderer.prototype.renderProgress = function(progress) {
123
+	var gl = this.gl;
124
+	var loc = gl.getUniformLocation(this.loadingProgram, 'progress');
125
+	gl.uniform1f(loc, progress);
126
+	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
127
+};
128
+
129
+WebGLRenderer.prototype.render = function(y, cb, cr) {
130
+	if (!this.enabled) {
131
+		return;
132
+	}
133
+
134
+	var gl = this.gl;
135
+	var w = ((this.width + 15) >> 4) << 4,
136
+		h = this.height,
137
+		w2 = w >> 1,
138
+		h2 = h >> 1;
139
+
140
+	// In some browsers WebGL doesn't like Uint8ClampedArrays (this is a bug
141
+	// and should be fixed soon-ish), so we have to create a Uint8Array view 
142
+	// for each plane.
143
+	if (this.shouldCreateUnclampedViews) {
144
+		y = new Uint8Array(y.buffer),
145
+		cb = new Uint8Array(cb.buffer),
146
+		cr = new Uint8Array(cr.buffer);	
147
+	}	
148
+
149
+	this.updateTexture(gl.TEXTURE0, this.textureY, w, h, y);
150
+	this.updateTexture(gl.TEXTURE1, this.textureCb, w2, h2, cb);
151
+	this.updateTexture(gl.TEXTURE2, this.textureCr, w2, h2, cr);
152
+
153
+	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
154
+};
155
+
156
+WebGLRenderer.prototype.updateTexture = function(unit, texture, w, h, data) {
157
+	var gl = this.gl;
158
+	gl.activeTexture(unit);
159
+	gl.bindTexture(gl.TEXTURE_2D, texture);
160
+	gl.texImage2D(
161
+		gl.TEXTURE_2D, 0, gl.LUMINANCE, w, h, 0, 
162
+		gl.LUMINANCE, gl.UNSIGNED_BYTE, data
163
+	);
164
+}
165
+
166
+WebGLRenderer.IsSupported = function() {
167
+	try {
168
+		if (!window.WebGLRenderingContext) {
169
+			return false;
170
+		}
171
+
172
+		var canvas = document.createElement('canvas'); 
173
+		return !!(
174
+			canvas.getContext('webgl') || 
175
+			canvas.getContext('experimental-webgl')
176
+		);
177
+	}
178
+	catch (err) {
179
+		return false;
180
+	} 
181
+};
182
+
183
+WebGLRenderer.SHADER = {
184
+	FRAGMENT_YCRCB_TO_RGBA: [
185
+		'precision mediump float;',
186
+		'uniform sampler2D textureY;',
187
+		'uniform sampler2D textureCb;',
188
+		'uniform sampler2D textureCr;',
189
+		'varying vec2 texCoord;',
190
+
191
+		'void main() {',
192
+			'float y = texture2D(textureY, texCoord).r;',
193
+			'float cb = texture2D(textureCb, texCoord).r - 0.5;',
194
+			'float cr = texture2D(textureCr, texCoord).r - 0.5;',
195
+
196
+			'gl_FragColor = vec4(',
197
+				'y + 1.4 * cb,',
198
+				'y + -0.343 * cr - 0.711 * cb,',
199
+				'y + 1.765 * cr,',
200
+				'1.0',
201
+			');',
202
+		'}'
203
+	].join('\n'),
204
+
205
+	FRAGMENT_LOADING: [
206
+		'precision mediump float;',
207
+		'uniform float progress;',
208
+		'varying vec2 texCoord;',
209
+
210
+		'void main() {',
211
+			'float c = ceil(progress-(1.0-texCoord.y));',
212
+			'gl_FragColor = vec4(c,c,c,1);',
213
+		'}'
214
+	].join('\n'),
215
+
216
+	VERTEX_IDENTITY: [
217
+		'attribute vec2 vertex;',
218
+		'varying vec2 texCoord;',
219
+
220
+		'void main() {',
221
+			'texCoord = vertex;',
222
+			'gl_Position = vec4((vertex * 2.0 - 1.0) * vec2(1, -1), 0.0, 1.0);',
223
+		'}'
224
+	].join('\n')
225
+};
226
+
227
+return WebGLRenderer;
228
+
229
+})();
230
+

+ 66 - 0
src/websocket.js Ver fichero

@@ -0,0 +1,66 @@
1
JSMpeg.Source.WebSocket = (function(){ "use strict";

// Source that receives a live binary stream (e.g. raw MPEG-TS from
// websocket-relay.js) over a WebSocket and forwards each message to the
// attached destination (demuxer) via destination.write().
//
// options.reconnectInterval — seconds to wait before reconnecting after the
// socket closes; 0 disables automatic reconnects.
// BUGFIX: the previous `options.reconnectInterval || 5` coerced an explicit
// 0 into 5, making it impossible to turn reconnects off even though
// shouldAttemptReconnect is derived from the value's truthiness.
var WSSource = function(url, options) {
	this.url = url;
	this.socket = null;

	this.callbacks = {connect: [], data: []};
	this.destination = null;

	this.reconnectInterval = options.reconnectInterval !== undefined
		? options.reconnectInterval
		: 5;
	this.shouldAttemptReconnect = !!this.reconnectInterval;

	// Handle of the pending reconnect timer. onerror and onclose are both
	// wired to onClose(), so one failure can invoke it twice; tracking the
	// timer lets us avoid scheduling two overlapping reconnect attempts.
	this.reconnectTimeoutId = 0;

	this.completed = false;   // a live stream never "completes"
	this.established = false; // true once the socket has opened
	this.progress = 0;
};

// Attach the demuxer/buffer that will receive the raw stream data.
WSSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Permanently stop this source: cancel any pending reconnect and close
// the socket (guarded — start() may never have been called).
WSSource.prototype.abort = function() {
	this.shouldAttemptReconnect = false;
	clearTimeout(this.reconnectTimeoutId);
	if (this.socket) {
		this.socket.close();
	}
};

// Open the WebSocket. Called by the player on startup and by onClose()
// when reconnecting.
WSSource.prototype.start = function() {
	this.shouldAttemptReconnect = !!this.reconnectInterval;
	this.progress = 0;
	this.established = false;

	this.socket = new WebSocket(this.url);
	this.socket.binaryType = 'arraybuffer';
	this.socket.onmessage = this.onMessage.bind(this);
	this.socket.onopen = this.onOpen.bind(this);
	this.socket.onerror = this.onClose.bind(this);
	this.socket.onclose = this.onClose.bind(this);
};

WSSource.prototype.resume = function(secondsHeadroom) {
	// Nothing to do here — a live stream has no buffering headroom to manage.
};

WSSource.prototype.onOpen = function() {
	this.progress = 1;
	this.established = true;
};

// Schedule a reconnect unless disabled. Clearing the previous timer keeps
// a combined onerror+onclose from stacking two start() calls.
WSSource.prototype.onClose = function() {
	if (this.shouldAttemptReconnect) {
		clearTimeout(this.reconnectTimeoutId);
		this.reconnectTimeoutId = setTimeout(function(){
			this.start();
		}.bind(this), this.reconnectInterval*1000);
	}
};

// Forward each binary message (an ArrayBuffer) to the destination.
WSSource.prototype.onMessage = function(ev) {
	if (this.destination) {
		this.destination.write(ev.data);
	}
};

return WSSource;

})();
66
+

+ 0 - 42
stream-example.html Ver fichero

@@ -1,42 +0,0 @@
1
-<!DOCTYPE html>
2
-<html>
3
-<head>
4
-	<meta name="viewport" content="width=320, initial-scale=1"/>
5
-	<title>jsmpeg streaming</title>
6
-	<style type="text/css">
7
-		body {
8
-			background: #333;
9
-			text-align: center;
10
-			margin-top: 10%;
11
-		}
12
-		#videoCanvas {
13
-			/* Always stretch the canvas to 640x480, regardless of its
14
-			internal size. */
15
-			width: 640px;
16
-			height: 480px;
17
-		}
18
-	</style>
19
-</head>
20
-<body>
21
-	<!-- The Canvas size specified here is the "initial" internal resolution. jsmpeg will
22
-		change this internal resolution to whatever the source provides. The size the
23
-		canvas is displayed on the website is dictated by the CSS style.
24
-	-->
25
-	<canvas id="videoCanvas" width="640" height="480">
26
-		<p>
27
-			Please use a browser that supports the Canvas Element, like
28
-			<a href="http://www.google.com/chrome">Chrome</a>,
29
-			<a href="http://www.mozilla.com/firefox/">Firefox</a>,
30
-			<a href="http://www.apple.com/safari/">Safari</a> or Internet Explorer 10
31
-		</p>
32
-	</canvas>
33
-	<script type="text/javascript" src="jsmpg.js"></script>
34
-	<script type="text/javascript">
35
-		// Setup the WebSocket connection and start the player
36
-		var client = new WebSocket( 'ws://example.com:8084/' );
37
-
38
-		var canvas = document.getElementById('videoCanvas');
39
-		var player = new jsmpeg(client, {canvas:canvas});
40
-	</script>
41
-</body>
42
-</html>

+ 0 - 76
stream-server.js Ver fichero

@@ -1,76 +0,0 @@
1
-
2
-if( process.argv.length < 3 ) {
3
-	console.log(
4
-		'Usage: \n' +
5
-		'node stream-server.js <secret> [<stream-port> <websocket-port>]'
6
-	);
7
-	process.exit();
8
-}
9
-
10
-var STREAM_SECRET = process.argv[2],
11
-	STREAM_PORT = process.argv[3] || 8082,
12
-	WEBSOCKET_PORT = process.argv[4] || 8084,
13
-	STREAM_MAGIC_BYTES = 'jsmp'; // Must be 4 bytes
14
-
15
-var width = 320,
16
-	height = 240;
17
-
18
-// Websocket Server
19
-var socketServer = new (require('ws').Server)({port: WEBSOCKET_PORT});
20
-socketServer.on('connection', function(socket) {
21
-	// Send magic bytes and video size to the newly connected socket
22
-	// struct { char magic[4]; unsigned short width, height;}
23
-	var streamHeader = new Buffer(8);
24
-	streamHeader.write(STREAM_MAGIC_BYTES);
25
-	streamHeader.writeUInt16BE(width, 4);
26
-	streamHeader.writeUInt16BE(height, 6);
27
-	socket.send(streamHeader, {binary:true});
28
-
29
-	console.log( 'New WebSocket Connection ('+socketServer.clients.length+' total)' );
30
-	
31
-	socket.on('close', function(code, message){
32
-		console.log( 'Disconnected WebSocket ('+socketServer.clients.length+' total)' );
33
-	});
34
-});
35
-
36
-socketServer.broadcast = function(data, opts) {
37
-	for( var i in this.clients ) {
38
-		if (this.clients[i].readyState == 1) {
39
-			this.clients[i].send(data, opts);
40
-		}
41
-		else {
42
-			console.log( 'Error: Client ('+i+') not connected.' );
43
-		}
44
-	}
45
-};
46
-
47
-
48
-// HTTP Server to accept incomming MPEG Stream
49
-var streamServer = require('http').createServer( function(request, response) {
50
-	var params = request.url.substr(1).split('/');
51
-
52
-	if( params[0] == STREAM_SECRET ) {
53
-		response.connection.setTimeout(0);
54
-		
55
-		width = (params[1] || 320)|0;
56
-		height = (params[2] || 240)|0;
57
-		
58
-		console.log(
59
-			'Stream Connected: ' + request.socket.remoteAddress + 
60
-			':' + request.socket.remotePort + ' size: ' + width + 'x' + height
61
-		);
62
-		request.on('data', function(data){
63
-			socketServer.broadcast(data, {binary:true});
64
-		});
65
-	}
66
-	else {
67
-		console.log(
68
-			'Failed Stream Connection: '+ request.socket.remoteAddress + 
69
-			request.socket.remotePort + ' - wrong secret.'
70
-		);
71
-		response.end();
72
-	}
73
-}).listen(STREAM_PORT);
74
-
75
-console.log('Listening for MPEG Stream on http://127.0.0.1:'+STREAM_PORT+'/<secret>/<width>/<height>');
76
-console.log('Awaiting WebSocket connections on ws://127.0.0.1:'+WEBSOCKET_PORT+'/');

+ 22 - 0
view-stream.html Ver fichero

@@ -0,0 +1,22 @@
1
<!DOCTYPE html>
<html>
<head>
	<title>JSMpeg Stream Client</title>
	<style type="text/css">
		/* Dark backdrop, video centered horizontally. */
		html, body {
			background-color: #111;
			text-align: center;
		}
	</style>
</head>
<body>
	<!-- The player resizes this canvas to the source's resolution. -->
	<canvas id="video-canvas"></canvas>

	<script type="text/javascript" src="jsmpeg.min.js"></script>
	<script type="text/javascript">
		// Connect to the websocket-relay on the host serving this page
		// and attach the player to the canvas above.
		var target = document.getElementById('video-canvas');
		var streamUrl = 'ws://'+document.location.hostname+':8082/';
		var player = new JSMpeg.Player(streamUrl, {canvas: target});
	</script>
</body>
</html>

+ 89 - 0
websocket-relay.js Ver fichero

@@ -0,0 +1,89 @@
1
// Use the websocket-relay to serve a raw MPEG-TS over WebSockets. You can use
// ffmpeg to feed the relay. ffmpeg -> websocket-relay -> browser
// Example:
// node websocket-relay yoursecret 8081 8082
// ffmpeg -i <some input> -f mpegts http://localhost:8081/yoursecret

var fs = require('fs'),
	http = require('http'),
	WebSocket = require('ws');

if (process.argv.length < 3) {
	console.log(
		'Usage: \n' +
		'node websocket-relay.js <secret> [<stream-port> <websocket-port>]'
	);
	process.exit();
}

var STREAM_SECRET = process.argv[2],
	STREAM_PORT = process.argv[3] || 8081,
	WEBSOCKET_PORT = process.argv[4] || 8082,
	RECORD_STREAM = false; // set to true to also dump the stream to recordings/*.ts

// Websocket Server: fans the incoming stream out to every connected client.
// perMessageDeflate is disabled to keep latency low.
var socketServer = new WebSocket.Server({port: WEBSOCKET_PORT, perMessageDeflate: false});
socketServer.connectionCount = 0;
socketServer.on('connection', function(socket) {
	socketServer.connectionCount++;
	console.log(
		'New WebSocket Connection: ', 
		// NOTE(review): socket.upgradeReq only exists in ws <= 2.x; on newer
		// versions use the (socket, request) connection signature — confirm
		// the pinned ws version before upgrading.
		socket.upgradeReq.socket.remoteAddress,
		socket.upgradeReq.headers['user-agent'],
		'('+socketServer.connectionCount+' total)'
	);
	socket.on('close', function(code, message){
		socketServer.connectionCount--;
		console.log(
			'Disconnected WebSocket ('+socketServer.connectionCount+' total)'
		);
	});
});

// Send a chunk to every client whose socket is currently open.
socketServer.broadcast = function(data) {
	socketServer.clients.forEach(function each(client) {
		if (client.readyState === WebSocket.OPEN) {
			client.send(data);
		}
	});
};

// HTTP Server to accept incomming MPEG-TS Stream from ffmpeg.
// The first path segment must match the shared secret.
var streamServer = http.createServer( function(request, response) {
	var params = request.url.substr(1).split('/');

	if (params[0] !== STREAM_SECRET) {
		console.log(
			'Failed Stream Connection: '+ request.socket.remoteAddress + ':' +
			request.socket.remotePort + ' - wrong secret.'
		);
		response.end();
		// BUGFIX: without this return, execution fell through below and an
		// unauthenticated publisher's data was still broadcast to all
		// clients (and the connection was logged as "Stream Connected").
		return;
	}

	// Keep the publisher's connection open indefinitely.
	response.connection.setTimeout(0);
	console.log(
		'Stream Connected: ' + 
		request.socket.remoteAddress + ':' +
		request.socket.remotePort
	);
	request.on('data', function(data){
		socketServer.broadcast(data);
		if (request.socket.recording) {
			request.socket.recording.write(data);
		}
	});
	request.on('end',function(){
		console.log('close');
		if (request.socket.recording) {
			request.socket.recording.close();
		}
	});

	// Record the stream to a local file?
	if (RECORD_STREAM) {
		var path = 'recordings/' + Date.now() + '.ts';
		request.socket.recording = fs.createWriteStream(path);
	}
}).listen(STREAM_PORT);

console.log('Listening for incomming MPEG-TS Stream on http://127.0.0.1:'+STREAM_PORT+'/<secret>');
console.log('Awaiting WebSocket connections on ws://127.0.0.1:'+WEBSOCKET_PORT+'/');