Refer to this document or the Vue sample program. Note: when external-screen rendering is enabled, it is recommended that the original screen stop rendering its video streams; clean up timers and unused data on remote promptly to reduce the performance cost.
// Open the external screen window
ipcMain.on("openWindow", (event, arg) => {
if (arg) {
const displays = screen.getAllDisplays();
const externalDisplay = displays.find((display) => {
return display.bounds.x !== 0 || display.bounds.y !== 0;
});
if (externalWindow) {
externalWindow.close();
}
if (externalDisplay) {
externalWindow = new BrowserWindow({
x: externalDisplay.bounds.x + 50,
y: externalDisplay.bounds.y + 50,
width: 1000,
height: 660,
backgroundColor: "#fff",
titleBarStyle: "hidden",
webPreferences: { nodeIntegration: true, enableRemoteModule: true },
title: "小鱼Electron 外接屛幕",
icon: path.join(__static, "logo.png"),
});
if (isDevelopment) {
externalWindow.loadURL(process.env.WEBPACK_DEV_SERVER_URL + "#/external");
} else {
externalWindow.loadURL(
formatUrl({
pathname: path.join(__dirname, "index.html"),
protocol: "file",
slashes: false,
hash: "externel",
})
);
}
// Listen for the external page to finish loading; once it has, start passing data to it
externalWindow.webContents.on("did-finish-load", () => {
win.webContents.send("domReady", true);
});
externalWindow.once("ready-to-show", () => {
externalWindow.show();
});
// When the external window is closed, notify the original screen to stop fetching videoFrame data on its timer.
externalWindow.on("closed", () => {
externalWindow = null;
if (win) {
win.webContents.send("closedExternalWindow", true);
}
});
} else {
// Notify the original screen that there is no external display
win.webContents.send("secondWindow", false);
}
}
});
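For context, here is a minimal sketch of how the original screen's renderer could trigger this handler and react to the "secondWindow" notice; the channels match the code above, while the function name is only illustrative.
// Original screen renderer: ask the main process to open the external window.
import { ipcRenderer } from "electron";

function openExternalWindow() {
  ipcRenderer.send("openWindow", true);
}

// The main process replies on "secondWindow" when no external display is found.
ipcRenderer.on("secondWindow", (event, hasExternal) => {
  if (!hasExternal) {
    console.log("No external display detected; keep rendering on the original screen");
  }
});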
// Listen for the external page to finish loading
ipcRenderer.on("domReady", (event, msg) => {
if (msg) {
// Stop video stream rendering on the original screen
this.xyRTC.stopAllVideoRender();
// Send the layout data. Note: whenever videoStreams change, push the layout data again so both screens stay consistent.
ipcRenderer.send("externelLayout", {
layout: this.layout,
});
// Store the videoFrame data, see Step 3
}
});
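The "externelLayout" message goes to the main process, which then has to forward the layout to the external window. A minimal sketch of that relay, assuming externalWindow is the BrowserWindow created above:
// Main process: relay layout data from the original screen to the external window
ipcMain.on("externelLayout", (event, data) => {
  if (externalWindow) {
    externalWindow.webContents.send("externelLayout", data);
  }
});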
// Define the shared object on the main process ahead of time; it stores the video stream data accessed via remote
global.sharedObject = {
videoFrames: {},
};
// Original screen: pass a callback that stores each sourceId's videoFrame on remote
xyRTC.startExternal(({ sourceId, videoFrame }) => {
if (videoFrame && videoFrame.hasData) {
const temp = remote.getGlobal("sharedObject").videoFrames;
if (temp[sourceId]) {
remote.getGlobal("sharedObject").videoFrames[
sourceId
] = videoFrame;
} else {
remote.getGlobal("sharedObject").videoFrames = {
...temp,
[sourceId]: {},
};
}
}
});
// Create your own Render instance per sourceId
setRender(sourceId) {
if (sourceId && !this.renderMap.get(sourceId)) {
const render = new Render(this.$refs.videoRef);
this.renderMap.set(sourceId, render);
}
}
// Start the draw loop once a sourceId is available and the call state allows it
if (sourceId && !this.videoRenderTimer && state === 5) {
// Render at 30 frames per second (one frame roughly every 33.33 ms)
this.videoRenderTimer = xyTimer.setInterval(
sourceId,
() => {
this.drawBySourceId(sourceId);
},
33.33
);
}
// Otherwise stop the loop
if ((!sourceId && this.videoRenderTimer) || state !== 5) {
this.clearTimer();
}
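clearTimer is referenced above but not shown. A minimal sketch follows, assuming the component also keeps the sourceId that was used as the xyTimer key (the timerSourceId field is hypothetical):
// Stop the per-source draw loop; xyTimer.clearInterval is keyed by the same
// string that was passed to xyTimer.setInterval.
clearTimer() {
  if (this.videoRenderTimer && this.timerSourceId) {
    xyTimer.clearInterval(this.timerSourceId);
    this.videoRenderTimer = null;
    this.timerSourceId = null;
  }
}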
// External screen: draw a frame
drawExternalVideoFrame(id, videoFrame) {
const render = this.renderMap.get(id);
if (render) {
render.draw(
videoFrame.buffer,
videoFrame.width,
videoFrame.height,
videoFrame.rotation
);
}
},
// Get the videoFrame for the given sourceId and draw it through its render
drawBySourceId(sourceId) {
const videoFrame = remote.getGlobal("sharedObject").videoFrames[sourceId];
videoFrame?.hasData && this.drawExternalVideoFrame(sourceId, videoFrame);
}
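On the external window side, the layout pushed on "externelLayout" is what drives the two steps above (setRender plus the xyTimer draw loop). A minimal sketch, assuming the main process forwards the channel as outlined earlier and that each layout item exposes its sourceId (the field name is illustrative):
// External window renderer: rebuild renders whenever the layout is pushed
ipcRenderer.on("externelLayout", (event, { layout }) => {
  this.layout = layout || [];
  this.layout.forEach((item) => {
    // item.sourceId is an assumed field; read it from wherever your layout items keep it
    this.setRender(item.sourceId);
  });
});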
ipcRenderer.on("closedExternalWindow", (event, msg) => {
if (msg) {
remote.getGlobal("sharedObject").videoFrames = {};
xyRTC.stopExternal();
}
});
// Pass a callback; the SDK returns the videoFrame for each sourceId at 30 frames per second
xyRTC.startExternal(callback: ICallback);
type ICallback = ({ sourceId, videoFrame }) => void;
// For reference, store each sourceId's videoFrame on the main-process shared object via remote, as follows
const callback = ({ sourceId, videoFrame }) => {
if (videoFrame && videoFrame.hasData) {
const temp = remote.getGlobal("sharedObject").videoFrames;
if (temp[sourceId]) {
remote.getGlobal("sharedObject").videoFrames[
sourceId
] = videoFrame;
} else {
remote.getGlobal("sharedObject").videoFrames = {
...temp,
[sourceId]: {},
};
}
}
}
xyRTC.stopExternal();
Stop video stream rendering on the original screen
xyRTC.stopAllVideoRender();
Renderer
import { Render } from "@xylink/xy-electron-sdk";
const render = new Render(canvasElement);
render.draw(bufferData, width, height, rotation);
Timer: callbacks follow the screen refresh rate, giving a more stable polling callback
import { xyTimer } from "@xylink/xy-electron-sdk";
xyTimer.setInterval(key, cb, interval);
xyTimer.clearInterval(key);
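A short usage sketch showing how the key ties the two calls together (the key value and callback are illustrative):
// Start a ~30 fps loop under a string key, then stop it with the same key
xyTimer.setInterval("video-loop", () => console.log("draw one frame"), 33.33);
xyTimer.clearInterval("video-loop");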