增加rdp协议声音输出功能

This commit is contained in:
dushixiang
2021-02-15 19:06:23 +08:00
parent 630e7d3476
commit b8de79fb49
4 changed files with 36 additions and 5 deletions

View File

@ -111,7 +111,7 @@ func CloseSessionById(sessionId string, code int, reason string) {
	defer mutex.Unlock()
	observable, _ := global.Store.Get(sessionId)
	if observable != nil {
		logrus.Debugf("会话%v创建者退出,原因:%v", sessionId, reason)
		observable.Subject.Close(code, reason)
		for i := 0; i < len(observable.Observers); i++ {

View File

@ -201,6 +201,9 @@ func TunEndpoint(c echo.Context) error {
			}
			break
		}
		if len(instruction) == 0 {
			continue
		}
		err = ws.WriteMessage(websocket.TextMessage, instruction)
		if err != nil {
			if connectionId == "" {

View File

@ -153,13 +153,13 @@ func NewTunnel(address string, config Configuration) (ret *Tunnel, err error) {
	if err := ret.WriteInstructionAndFlush(NewInstruction("size", width, height, dpi)); err != nil {
		return nil, err
	}
	if err := ret.WriteInstructionAndFlush(NewInstruction("audio", "audio/L8", "audio/L16")); err != nil {
		return nil, err
	}
	if err := ret.WriteInstructionAndFlush(NewInstruction("video")); err != nil {
		return nil, err
	}
	if err := ret.WriteInstructionAndFlush(NewInstruction("image", "image/jpeg", "image/png", "image/webp")); err != nil {
		return nil, err
	}
	if err := ret.WriteInstructionAndFlush(NewInstruction("timezone", "Asia/Shanghai")); err != nil {
@ -246,7 +246,14 @@ func (opt *Tunnel) ReadInstruction() (instruction Instruction, err error) {
func (opt *Tunnel) Read() (p []byte, err error) {
	p, err = opt.rw.ReadBytes(Delimiter)
	//fmt.Printf("<- %v \n", string(p))
	s := string(p)
	if s == "rate=44100,channels=2;" {
		return make([]byte, 0), nil
	}
	if s == "5.audio,1.1,31.audio/L16;" {
		s += "rate=44100,channels=2;"
	}
	return []byte(s), err
}

func (opt *Tunnel) expect(opcode string) (instruction Instruction, err error) {

View File

@ -151,6 +151,8 @@ class Access extends Component {
        this.onWindowResize(null);
        message.destroy();
        message.success('连接成功');
        console.log('requestAudioStream')
        this.requestAudioStream();
        // 向后台发送请求,更新会话的状态
        this.updateSessionStatus(this.state.sessionId).then(_ => {
        })
@ -267,6 +269,7 @@ class Access extends Component {
    }

    clientClipboardReceived = (stream, mimetype) => {
        console.log('clientClipboardReceived', mimetype)
        let reader;

        // If the received data is text, read it as a simple string
@ -527,6 +530,24 @@ class Access extends Component {
        }
    }

    requestAudioStream = () => {
        let client = this.state.client;

        // Create new audio stream, associating it with an AudioRecorder
        const stream = client.createAudioStream('audio/L16;rate=44100,channels=2');
        const recorder = Guacamole.AudioRecorder.getInstance(stream, 'audio/L16;rate=44100,channels=2');

        // If creation of the AudioRecorder failed, simply end the stream
        if (!recorder)
            stream.sendEnd();

        // Otherwise, ensure that another audio stream is created after this
        // audio stream is closed
        else
            recorder.onclose = () => {
                console.log('audio closed')
            };
    }

    render() {
        const menu = (